$

$(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
$(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
$(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
$(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
$(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.clustering.LDA
 
$(Param<T>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
$(Param<T>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
$(Param<T>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.DCT
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.IDF
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.Interaction
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.NGram
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.PCA
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormula
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
$(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.Pipeline
 
$(Param<T>) - Static method in class org.apache.spark.ml.PipelineModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
$(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
$(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
$(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
$(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
$(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
$colon$bslash(B, Function2<A, B, B>) - Static method in class org.apache.spark.sql.types.StructType
 
$colon$plus(B, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
$div$colon(B, Function2<B, A, B>) - Static method in class org.apache.spark.sql.types.StructType
 
$greater(A) - Static method in class org.apache.spark.sql.types.Decimal
 
$greater(A) - Static method in class org.apache.spark.storage.RDDInfo
 
$greater$eq(A) - Static method in class org.apache.spark.sql.types.Decimal
 
$greater$eq(A) - Static method in class org.apache.spark.storage.RDDInfo
 
$less(A) - Static method in class org.apache.spark.sql.types.Decimal
 
$less(A) - Static method in class org.apache.spark.storage.RDDInfo
 
$less$eq(A) - Static method in class org.apache.spark.sql.types.Decimal
 
$less$eq(A) - Static method in class org.apache.spark.storage.RDDInfo
 
$minus$greater(T) - Static method in class org.apache.spark.ml.param.DoubleParam
 
$minus$greater(T) - Static method in class org.apache.spark.ml.param.FloatParam
 
$plus$colon(B, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
$plus$eq(T) - Static method in class org.apache.spark.Accumulator
Deprecated.
 
$plus$plus(RDD<T>) - Static method in class org.apache.spark.api.r.RRDD
 
$plus$plus(RDD<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
$plus$plus(RDD<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
$plus$plus(RDD<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
$plus$plus(RDD<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
$plus$plus(RDD<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
$plus$plus(RDD<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
$plus$plus(RDD<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
$plus$plus(RDD<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
$plus$plus(GenTraversableOnce<B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
$plus$plus$colon(TraversableOnce<B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
$plus$plus$colon(Traversable<B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
$plus$plus$eq(R) - Static method in class org.apache.spark.Accumulator
Deprecated.
 

A

abs(Column) - Static method in class org.apache.spark.sql.functions
Computes the absolute value.
abs() - Method in class org.apache.spark.sql.types.Decimal
 
absent() - Static method in class org.apache.spark.api.java.Optional
 
AbsoluteError - Class in org.apache.spark.mllib.tree.loss
:: DeveloperApi :: Class for absolute error loss calculation (for regression).
AbsoluteError() - Constructor for class org.apache.spark.mllib.tree.loss.AbsoluteError
 
accept(Parsers) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
accept(ES, Function1<ES, List<Object>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
accept(String, PartialFunction<Object, U>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
acceptIf(Function1<Object, Object>, Function1<Object, String>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
acceptMatch(String, PartialFunction<Object, U>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
acceptSeq(ES, Function1<ES, Iterable<Object>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
accId() - Method in class org.apache.spark.CleanAccum
 
Accumulable<R,T> - Class in org.apache.spark
Deprecated.
use AccumulatorV2. Since 2.0.0.
Accumulable(R, AccumulableParam<R, T>) - Constructor for class org.apache.spark.Accumulable
Deprecated.
 
accumulable(T, AccumulableParam<T, R>) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulable shared variable of the given type, to which tasks can "add" values with add.
accumulable(T, String, AccumulableParam<T, R>) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulable shared variable of the given type, to which tasks can "add" values with add.
accumulable(R, AccumulableParam<R, T>) - Method in class org.apache.spark.SparkContext
Deprecated.
use AccumulatorV2. Since 2.0.0.
accumulable(R, String, AccumulableParam<R, T>) - Method in class org.apache.spark.SparkContext
Deprecated.
use AccumulatorV2. Since 2.0.0.
accumulableCollection(R, Function1<R, Growable<T>>, ClassTag<R>) - Method in class org.apache.spark.SparkContext
Deprecated.
use AccumulatorV2. Since 2.0.0.
AccumulableInfo - Class in org.apache.spark.scheduler
:: DeveloperApi :: Information about an Accumulable modified during a task or stage.
AccumulableInfo - Class in org.apache.spark.status.api.v1
 
accumulableInfoFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
accumulableInfoToJson(AccumulableInfo) - Static method in class org.apache.spark.util.JsonProtocol
 
AccumulableParam<R,T> - Interface in org.apache.spark
Deprecated.
use AccumulatorV2. Since 2.0.0.
accumulables() - Method in class org.apache.spark.scheduler.StageInfo
Terminal values of accumulables updated during this stage, including all the user-defined accumulators.
accumulables() - Method in class org.apache.spark.scheduler.TaskInfo
Intermediate updates to accumulables during this task.
accumulables() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
Accumulator<T> - Class in org.apache.spark
Deprecated.
use AccumulatorV2. Since 2.0.0.
accumulator(int) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator integer variable, which tasks can "add" values to using the add method.
accumulator(int, String) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator integer variable, which tasks can "add" values to using the add method.
accumulator(double) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator double variable, which tasks can "add" values to using the add method.
accumulator(double, String) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator double variable, which tasks can "add" values to using the add method.
accumulator(T, AccumulatorParam<T>) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator variable of a given type, which tasks can "add" values to using the add method.
accumulator(T, String, AccumulatorParam<T>) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator variable of a given type, which tasks can "add" values to using the add method.
accumulator(T, AccumulatorParam<T>) - Method in class org.apache.spark.SparkContext
Deprecated.
use AccumulatorV2. Since 2.0.0.
accumulator(T, String, AccumulatorParam<T>) - Method in class org.apache.spark.SparkContext
Deprecated.
use AccumulatorV2. Since 2.0.0.
AccumulatorContext - Class in org.apache.spark.util
An internal class used to track accumulators by Spark itself.
AccumulatorContext() - Constructor for class org.apache.spark.util.AccumulatorContext
 
AccumulatorParam<T> - Interface in org.apache.spark
Deprecated.
use AccumulatorV2. Since 2.0.0.
AccumulatorParam.DoubleAccumulatorParam$ - Class in org.apache.spark
Deprecated.
use AccumulatorV2. Since 2.0.0.
AccumulatorParam.DoubleAccumulatorParam$() - Constructor for class org.apache.spark.AccumulatorParam.DoubleAccumulatorParam$
Deprecated.
 
AccumulatorParam.FloatAccumulatorParam$ - Class in org.apache.spark
Deprecated.
use AccumulatorV2. Since 2.0.0.
AccumulatorParam.FloatAccumulatorParam$() - Constructor for class org.apache.spark.AccumulatorParam.FloatAccumulatorParam$
Deprecated.
 
AccumulatorParam.IntAccumulatorParam$ - Class in org.apache.spark
Deprecated.
use AccumulatorV2. Since 2.0.0.
AccumulatorParam.IntAccumulatorParam$() - Constructor for class org.apache.spark.AccumulatorParam.IntAccumulatorParam$
Deprecated.
 
AccumulatorParam.LongAccumulatorParam$ - Class in org.apache.spark
Deprecated.
use AccumulatorV2. Since 2.0.0.
AccumulatorParam.LongAccumulatorParam$() - Constructor for class org.apache.spark.AccumulatorParam.LongAccumulatorParam$
Deprecated.
 
AccumulatorParam.StringAccumulatorParam$ - Class in org.apache.spark
Deprecated.
use AccumulatorV2. Since 2.0.0.
AccumulatorParam.StringAccumulatorParam$() - Constructor for class org.apache.spark.AccumulatorParam.StringAccumulatorParam$
Deprecated.
 
accumulatorUpdates() - Method in class org.apache.spark.status.api.v1.StageData
 
accumulatorUpdates() - Method in class org.apache.spark.status.api.v1.TaskData
 
AccumulatorV2<IN,OUT> - Class in org.apache.spark.util
The base class for accumulators, which can accumulate inputs of type IN and produce output of type OUT.
AccumulatorV2() - Constructor for class org.apache.spark.util.AccumulatorV2
 
accumUpdates() - Method in class org.apache.spark.ExceptionFailure
 
accumUpdates() - Method in class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
accuracy() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns accuracy (the fraction of correctly classified instances out of the total number of instances).
accuracy() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns accuracy.
acos(Column) - Static method in class org.apache.spark.sql.functions
Computes the inverse cosine of the given value; the returned angle is in the range 0.0 through pi.
acos(String) - Static method in class org.apache.spark.sql.functions
Computes the inverse cosine of the given column; the returned angle is in the range 0.0 through pi.
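For illustration, a minimal sketch of how these column functions are typically used, assuming an existing SparkSession named spark; the column name "x" is hypothetical:

    import org.apache.spark.sql.functions.{abs, acos, col}

    // assumes an existing SparkSession `spark`; "x" is a hypothetical column
    val df = spark.range(1, 4).toDF("x")
    df.select(abs(col("x") * -1), acos(col("x") / 10)).show()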
active() - Method in class org.apache.spark.sql.ContinuousQueryManager
Returns a list of active queries associated with this SQLContext.
active() - Method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
ACTIVE() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
activeJobs() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
activeStages() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
activeStorageStatusList() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
activeStorageStatusList() - Method in class org.apache.spark.ui.storage.StorageListener
 
activeTasks() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
add(T) - Method in class org.apache.spark.Accumulable
Deprecated.
Add more data to this accumulator / accumulable.
add(T) - Static method in class org.apache.spark.Accumulator
Deprecated.
 
add(org.apache.spark.ml.feature.Instance) - Method in class org.apache.spark.ml.classification.LogisticAggregator
Add a new training instance to this LogisticAggregator, and update the loss and gradient of the objective function.
add(AFTPoint) - Method in class org.apache.spark.ml.regression.AFTAggregator
Add a new training data point to this AFTAggregator, and update the loss and gradient of the objective function.
add(org.apache.spark.ml.feature.Instance) - Method in class org.apache.spark.ml.regression.LeastSquaresAggregator
Add a new training instance to this LeastSquaresAggregator, and update the loss and gradient of the objective function.
add(double[], MultivariateGaussian[], ExpectationSum, Vector<Object>) - Static method in class org.apache.spark.mllib.clustering.ExpectationSum
 
add(Vector) - Method in class org.apache.spark.mllib.feature.IDF.DocumentFrequencyAggregator
Adds a new document.
add(BlockMatrix) - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Adds the given block matrix other to this block matrix: this + other.
add(Vector) - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
Add a new sample to this summarizer, and update the statistical summary.
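As a rough sketch of how the online summarizer accumulates samples (the vectors below are arbitrary):

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.stat.MultivariateOnlineSummarizer

    val summarizer = new MultivariateOnlineSummarizer()
    summarizer.add(Vectors.dense(1.0, 2.0))
    summarizer.add(Vectors.dense(3.0, 4.0))
    println(summarizer.mean)      // column-wise mean of the samples added so far
    println(summarizer.variance)  // column-wise variance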
add(StructField) - Method in class org.apache.spark.sql.types.StructType
Creates a new StructType by adding a new field.
add(String, DataType) - Method in class org.apache.spark.sql.types.StructType
Creates a new StructType by adding a new nullable field with no metadata.
add(String, DataType, boolean) - Method in class org.apache.spark.sql.types.StructType
Creates a new StructType by adding a new field with no metadata.
add(String, DataType, boolean, Metadata) - Method in class org.apache.spark.sql.types.StructType
Creates a new StructType by adding a new field and specifying metadata.
add(String, String) - Method in class org.apache.spark.sql.types.StructType
Creates a new StructType by adding a new nullable field with no metadata where the dataType is specified as a String.
add(String, String, boolean) - Method in class org.apache.spark.sql.types.StructType
Creates a new StructType by adding a new field with no metadata where the dataType is specified as a String.
add(String, String, boolean, Metadata) - Method in class org.apache.spark.sql.types.StructType
Creates a new StructType by adding a new field and specifying metadata where the dataType is specified as a String.
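A small sketch of building a schema incrementally with the add variants listed above:

    import org.apache.spark.sql.types._

    // each add call returns a new StructType; nothing is mutated
    val schema = new StructType()
      .add("id", LongType, nullable = false)
      .add("name", StringType)        // nullable, no metadata
      .add("score", "double")         // dataType specified as a String

    println(schema("name"))           // look up a StructField by name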
add(long, long) - Static method in class org.apache.spark.streaming.util.RawTextHelper
 
add(IN) - Method in class org.apache.spark.util.AccumulatorV2
Takes the inputs and accumulates.
add(Double) - Method in class org.apache.spark.util.DoubleAccumulator
Adds v to the accumulator, i.e. increment sum by v and count by 1.
add(double) - Method in class org.apache.spark.util.DoubleAccumulator
Adds v to the accumulator, i.e. increment sum by v and count by 1.
add(T) - Method in class org.apache.spark.util.LegacyAccumulatorWrapper
 
add(T) - Method in class org.apache.spark.util.ListAccumulator
 
add(Long) - Method in class org.apache.spark.util.LongAccumulator
Adds v to the accumulator, i.e. increment sum by v and count by 1.
add(long) - Method in class org.apache.spark.util.LongAccumulator
Adds v to the accumulator, i.e. increment sum by v and count by 1.
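For context, a minimal sketch of the AccumulatorV2-based API that replaces the deprecated Accumulator, assuming an existing SparkContext `sc`:

    // sc.longAccumulator registers a named LongAccumulator with this SparkContext
    val errorCount = sc.longAccumulator("errorCount")

    sc.parallelize(Seq(1, -2, 3, -4)).foreach { x =>
      if (x < 0) errorCount.add(1L)   // add(long) is called on the executors
    }
    println(errorCount.value)         // merged value, read on the driver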
add(Object) - Method in class org.apache.spark.util.sketch.CountMinSketch
Increments item's count by one.
add(Object, long) - Method in class org.apache.spark.util.sketch.CountMinSketch
Increments item's count by count.
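A brief sketch of counting items with CountMinSketch; the depth, width, and seed below are arbitrary:

    import org.apache.spark.util.sketch.CountMinSketch

    // depth = 10, width = 1000, seed = 42 -- an arbitrary accuracy/size trade-off
    val sketch = CountMinSketch.create(10, 1000, 42)
    sketch.add("spark")                       // increment the item's count by one
    sketch.add("spark", 5L)                   // increment the item's count by 5
    println(sketch.estimateCount("spark"))    // approximate count, here >= 6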
add_months(Column, int) - Static method in class org.apache.spark.sql.functions
Returns the date that is numMonths after startDate.
addAccumulator(R, T) - Method in interface org.apache.spark.AccumulableParam
Deprecated.
Add additional data to the accumulator value.
addAccumulator(T, T) - Method in interface org.apache.spark.AccumulatorParam
Deprecated.
 
addAppArgs(String...) - Method in class org.apache.spark.launcher.SparkLauncher
Adds command line arguments for the application.
addBinary(byte[]) - Method in class org.apache.spark.util.sketch.CountMinSketch
Increments item's count by one.
addBinary(byte[], long) - Method in class org.apache.spark.util.sketch.CountMinSketch
Increments item's count by count.
addDockerInfo(Protos.ContainerInfo.Builder, String, Option<List<Protos.Volume>>, Option<Protos.ContainerInfo.DockerInfo.Network>, Option<List<Protos.ContainerInfo.DockerInfo.PortMapping>>) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
Construct a DockerInfo structure and insert it into a ContainerInfo.
addFile(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Add a file to be downloaded with this Spark job on every node.
addFile(String) - Method in class org.apache.spark.launcher.SparkLauncher
Adds a file to be submitted with the application.
addFile(String) - Method in class org.apache.spark.SparkContext
Add a file to be downloaded with this Spark job on every node.
addFile(String, boolean) - Method in class org.apache.spark.SparkContext
Add a file to be downloaded with this Spark job on every node.
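Roughly how addFile pairs with SparkFiles.get on the executors; the path below is hypothetical and `sc` is an assumed SparkContext:

    import org.apache.spark.SparkFiles

    sc.addFile("/tmp/lookup.txt")   // hypothetical local path on the driver

    sc.parallelize(1 to 2).foreach { _ =>
      // each executor resolves its own local copy of the distributed file
      println(SparkFiles.get("lookup.txt"))
    }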
addFilters(Seq<ServletContextHandler>, SparkConf) - Static method in class org.apache.spark.ui.JettyUtils
Add filters, if any, to the given list of ServletContextHandlers.
addGrid(Param<T>, Iterable<T>) - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Adds a param with multiple values (overwrites if the input param exists).
addGrid(DoubleParam, double[]) - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Adds a double param with multiple values.
addGrid(IntParam, int[]) - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Adds an int param with multiple values.
addGrid(FloatParam, float[]) - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Adds a float param with multiple values.
addGrid(LongParam, long[]) - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Adds a long param with multiple values.
addGrid(BooleanParam) - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Adds a boolean param with true and false.
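A short sketch of assembling a parameter grid with these addGrid overloads, assuming a LogisticRegression estimator:

    import org.apache.spark.ml.classification.LogisticRegression
    import org.apache.spark.ml.tuning.ParamGridBuilder

    val lr = new LogisticRegression()
    val grid = new ParamGridBuilder()
      .addGrid(lr.regParam, Array(0.01, 0.1))   // double param
      .addGrid(lr.maxIter, Array(10, 50))       // int param
      .addGrid(lr.fitIntercept)                 // boolean param: tries true and false
      .build()                                  // Array[ParamMap], 2 * 2 * 2 = 8 combinations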
addInPlace(R, R) - Method in interface org.apache.spark.AccumulableParam
Deprecated.
Merge two accumulated values together.
addInPlace(double, double) - Method in class org.apache.spark.AccumulatorParam.DoubleAccumulatorParam$
Deprecated.
 
addInPlace(float, float) - Method in class org.apache.spark.AccumulatorParam.FloatAccumulatorParam$
Deprecated.
 
addInPlace(int, int) - Method in class org.apache.spark.AccumulatorParam.IntAccumulatorParam$
Deprecated.
 
addInPlace(long, long) - Method in class org.apache.spark.AccumulatorParam.LongAccumulatorParam$
Deprecated.
 
addInPlace(String, String) - Method in class org.apache.spark.AccumulatorParam.StringAccumulatorParam$
Deprecated.
 
addIntercept() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
addIntercept() - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
addIntercept() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
Whether to add intercept (default: false).
addIntercept() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
addIntercept() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
addIntercept() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
addIntercept_$eq(boolean) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
addIntercept_$eq(boolean) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
addIntercept_$eq(boolean) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
addIntercept_$eq(boolean) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
addIntercept_$eq(boolean) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
addJar(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Adds a JAR dependency for all tasks to be executed on this SparkContext in the future.
addJar(String) - Method in class org.apache.spark.launcher.SparkLauncher
Adds a jar file to be submitted with the application.
addJar(String) - Method in class org.apache.spark.SparkContext
Adds a JAR dependency for all tasks to be executed on this SparkContext in the future.
addListener(SparkAppHandle.Listener) - Method in interface org.apache.spark.launcher.SparkAppHandle
Adds a listener to be notified of changes to the handle's information.
addListener(ContinuousQueryListener) - Method in class org.apache.spark.sql.ContinuousQueryManager
Register a ContinuousQueryListener to receive up-calls for life cycle events of ContinuousQueries.
addLocalConfiguration(String, int, int, int, JobConf) - Static method in class org.apache.spark.rdd.HadoopRDD
Add Hadoop configuration specific to a single partition and attempt.
addLong(long) - Method in class org.apache.spark.util.sketch.CountMinSketch
Increments item's count by one.
addLong(long, long) - Method in class org.apache.spark.util.sketch.CountMinSketch
Increments item's count by count.
addPartToPGroup(Partition, PartitionGroup) - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
addPyFile(String) - Method in class org.apache.spark.launcher.SparkLauncher
Adds a Python file / zip / egg to be submitted with the application.
address() - Method in class org.apache.spark.status.api.v1.RDDDataDistribution
 
addShutdownHook(Function0<BoxedUnit>) - Static method in class org.apache.spark.util.ShutdownHookManager
Adds a shutdown hook with default priority.
addShutdownHook(int, Function0<BoxedUnit>) - Static method in class org.apache.spark.util.ShutdownHookManager
Adds a shutdown hook with the given priority.
addSparkArg(String) - Method in class org.apache.spark.launcher.SparkLauncher
Adds a no-value argument to the Spark invocation.
addSparkArg(String, String) - Method in class org.apache.spark.launcher.SparkLauncher
Adds an argument with a value to the Spark invocation.
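For illustration, a sketch of launching an application programmatically with these SparkLauncher methods; the resource paths and class name are placeholders:

    import org.apache.spark.launcher.SparkLauncher

    val handle = new SparkLauncher()
      .setAppResource("/path/to/app.jar")          // placeholder path
      .setMainClass("com.example.Main")            // placeholder class
      .addAppArgs("--input", "/data/in")           // command line arguments for the app
      .addJar("/path/to/dependency.jar")           // extra jar shipped with the app
      .addSparkArg("--verbose")                    // no-value Spark argument
      .addSparkArg("--conf", "spark.ui.port=4051") // argument with a value
      .startApplication()                          // returns a SparkAppHandle

    println(handle.getState)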
addSparkListener(org.apache.spark.scheduler.SparkListenerInterface) - Method in class org.apache.spark.SparkContext
:: DeveloperApi :: Register a listener to receive up-calls from events that happen during execution.
addStreamingListener(StreamingListener) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Add a StreamingListener object for receiving system events related to streaming.
addStreamingListener(StreamingListener) - Method in class org.apache.spark.streaming.StreamingContext
Add a StreamingListener object for receiving system events related to streaming.
addString(StringBuilder, String, String, String) - Static method in class org.apache.spark.sql.types.StructType
 
addString(StringBuilder, String) - Static method in class org.apache.spark.sql.types.StructType
 
addString(StringBuilder) - Static method in class org.apache.spark.sql.types.StructType
 
addString(String) - Method in class org.apache.spark.util.sketch.CountMinSketch
Increments item's count by one.
addString(String, long) - Method in class org.apache.spark.util.sketch.CountMinSketch
Increments item's count by count.
addSuppressed(Throwable) - Static method in exception org.apache.spark.sql.AnalysisException
 
addSuppressed(Throwable) - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
addTaskCompletionListener(TaskCompletionListener) - Method in class org.apache.spark.TaskContext
Adds a (Java friendly) listener to be executed on task completion.
addTaskCompletionListener(Function1<TaskContext, BoxedUnit>) - Method in class org.apache.spark.TaskContext
Adds a listener in the form of a Scala closure to be executed on task completion.
addTaskFailureListener(TaskFailureListener) - Method in class org.apache.spark.TaskContext
Adds a listener to be executed on task failure.
addTaskFailureListener(Function2<TaskContext, Throwable, BoxedUnit>) - Method in class org.apache.spark.TaskContext
Adds a listener to be executed on task failure.
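A rough sketch of registering completion and failure listeners from inside a task, assuming an existing SparkContext `sc`:

    import org.apache.spark.TaskContext

    sc.parallelize(1 to 10, 2).foreachPartition { iter =>
      val ctx = TaskContext.get()
      // Scala-closure overload: runs when the task finishes (success or failure)
      ctx.addTaskCompletionListener { (_: TaskContext) => println("task done") }
      // runs only if the task fails with an exception
      ctx.addTaskFailureListener { (_: TaskContext, e: Throwable) => println(s"task failed: $e") }
      iter.foreach(_ => ())
    }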
AFTAggregator - Class in org.apache.spark.ml.regression
AFTAggregator computes the gradient and loss for an AFT loss function, as used in AFT survival regression, for samples in sparse or dense vectors in an online fashion.
AFTAggregator(DenseVector<Object>, boolean, double[]) - Constructor for class org.apache.spark.ml.regression.AFTAggregator
 
AFTCostFun - Class in org.apache.spark.ml.regression
AFTCostFun implements Breeze's DiffFunction[T] for AFT cost.
AFTCostFun(RDD<AFTPoint>, boolean, double[]) - Constructor for class org.apache.spark.ml.regression.AFTCostFun
 
AFTSurvivalRegression - Class in org.apache.spark.ml.regression
:: Experimental :: Fit a parametric survival regression model named accelerated failure time (AFT) model (https://en.wikipedia.org/wiki/Accelerated_failure_time_model) based on the Weibull distribution of the survival time.
AFTSurvivalRegression(String) - Constructor for class org.apache.spark.ml.regression.AFTSurvivalRegression
 
AFTSurvivalRegression() - Constructor for class org.apache.spark.ml.regression.AFTSurvivalRegression
 
AFTSurvivalRegressionModel - Class in org.apache.spark.ml.regression
:: Experimental :: Model produced by AFTSurvivalRegression.
agg(Column, Column...) - Method in class org.apache.spark.sql.Dataset
Aggregates on the entire Dataset without groups.
agg(Tuple2<String, String>, Seq<Tuple2<String, String>>) - Method in class org.apache.spark.sql.Dataset
(Scala-specific) Aggregates on the entire Dataset without groups.
agg(Map<String, String>) - Method in class org.apache.spark.sql.Dataset
(Scala-specific) Aggregates on the entire Dataset without groups.
agg(Map<String, String>) - Method in class org.apache.spark.sql.Dataset
(Java-specific) Aggregates on the entire Dataset without groups.
agg(Column, Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Aggregates on the entire Dataset without groups.
agg(TypedColumn<V, U1>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Computes the given aggregation, returning a Dataset of tuples for each unique key and the result of computing this aggregation over all elements in the group.
agg(TypedColumn<V, U1>, TypedColumn<V, U2>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Computes the given aggregations, returning a Dataset of tuples for each unique key and the result of computing these aggregations over all elements in the group.
agg(TypedColumn<V, U1>, TypedColumn<V, U2>, TypedColumn<V, U3>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Computes the given aggregations, returning a Dataset of tuples for each unique key and the result of computing these aggregations over all elements in the group.
agg(TypedColumn<V, U1>, TypedColumn<V, U2>, TypedColumn<V, U3>, TypedColumn<V, U4>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Computes the given aggregations, returning a Dataset of tuples for each unique key and the result of computing these aggregations over all elements in the group.
agg(Column, Column...) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute aggregates by specifying a series of aggregate columns.
agg(Tuple2<String, String>, Seq<Tuple2<String, String>>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
(Scala-specific) Compute aggregates by specifying a map from column name to aggregate methods.
agg(Map<String, String>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
(Scala-specific) Compute aggregates by specifying a map from column name to aggregate methods.
agg(Map<String, String>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
(Java-specific) Compute aggregates by specifying a map from column name to aggregate methods.
agg(Column, Seq<Column>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute aggregates by specifying a series of aggregate columns.
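A compact sketch of the untyped agg variants on Dataset and RelationalGroupedDataset, assuming an existing SparkSession `spark`; the column names and data are placeholders:

    import org.apache.spark.sql.functions.{avg, max}

    import spark.implicits._
    val sales = Seq(("a", 10.0), ("b", 20.0), ("a", 30.0)).toDF("key", "amount")

    // aggregate over the entire Dataset, without groups
    sales.agg(max("amount"), avg("amount")).show()

    // aggregate per group, via RelationalGroupedDataset
    sales.groupBy("key").agg(Map("amount" -> "avg")).show()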
aggregate(U, Function2<U, T, U>, Function2<U, U, U>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>) - Static method in class org.apache.spark.api.java.JavaRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Aggregate the elements of each partition, and then the results for all the partitions, using given combine functions and a neutral "zero value".
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
aggregate(U, Function2<U, T, U>, Function2<U, U, U>, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Aggregate the elements of each partition, and then the results for all the partitions, using given combine functions and a neutral "zero value".
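For illustration, a small sketch of aggregate computing a sum and a count in one pass, assuming an existing SparkContext `sc`:

    val nums = sc.parallelize(1 to 100)

    // zero value is (sum, count); the first function folds an element into a
    // partition's result, the second merges results from different partitions
    val (sum, count) = nums.aggregate((0, 0))(
      (acc, x) => (acc._1 + x, acc._2 + 1),
      (a, b)   => (a._1 + b._1, a._2 + b._2))

    println(sum.toDouble / count)   // mean = 50.5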
aggregate(Function0<B>, Function2<B, A, B>, Function2<B, B, B>) - Static method in class org.apache.spark.sql.types.StructType
 
aggregateByKey(U, Partitioner, Function2<U, V, U>, Function2<U, U, U>) - Method in class org.apache.spark.api.java.JavaPairRDD
Aggregate the values of each key, using given combine functions and a neutral "zero value".
aggregateByKey(U, int, Function2<U, V, U>, Function2<U, U, U>) - Method in class org.apache.spark.api.java.JavaPairRDD
Aggregate the values of each key, using given combine functions and a neutral "zero value".
aggregateByKey(U, Function2<U, V, U>, Function2<U, U, U>) - Method in class org.apache.spark.api.java.JavaPairRDD
Aggregate the values of each key, using given combine functions and a neutral "zero value".
aggregateByKey(U, Partitioner, Function2<U, V, U>, Function2<U, U, U>, ClassTag<U>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Aggregate the values of each key, using given combine functions and a neutral "zero value".
aggregateByKey(U, int, Function2<U, V, U>, Function2<U, U, U>, ClassTag<U>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Aggregate the values of each key, using given combine functions and a neutral "zero value".
aggregateByKey(U, Function2<U, V, U>, Function2<U, U, U>, ClassTag<U>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Aggregate the values of each key, using given combine functions and a neutral "zero value".
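A short sketch of aggregateByKey building per-key (sum, count) pairs, assuming an existing SparkContext `sc`:

    val pairs = sc.parallelize(Seq(("a", 1), ("a", 3), ("b", 5)))

    // the zero value is created once per key per partition; the two functions
    // play the same roles as seqOp / combOp in aggregate
    val sumCountByKey = pairs.aggregateByKey((0, 0))(
      (acc, v) => (acc._1 + v, acc._2 + 1),
      (a, b)   => (a._1 + b._1, a._2 + b._2))

    sumCountByKey.collect().foreach { case (k, (s, c)) =>
      println(s"$k -> mean ${s.toDouble / c}")
    }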
AggregatedDialect - Class in org.apache.spark.sql.jdbc
AggregatedDialect can unify multiple dialects into one virtual Dialect.
AggregatedDialect(List<JdbcDialect>) - Constructor for class org.apache.spark.sql.jdbc.AggregatedDialect
 
aggregateMessages(Function1<EdgeContext<VD, ED, A>, BoxedUnit>, Function2<A, A, A>, TripletFields, ClassTag<A>) - Method in class org.apache.spark.graphx.Graph
Aggregates values from the neighboring edges and vertices of each vertex.
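A minimal sketch of aggregateMessages computing in-degrees, assuming an existing SparkContext `sc`; the edge data is arbitrary:

    import org.apache.spark.graphx.{Edge, Graph}

    val edges = sc.parallelize(Seq(Edge(1L, 2L, "follows"), Edge(3L, 2L, "follows")))
    val graph = Graph.fromEdges(edges, defaultValue = 0)

    // send 1 to the destination of every edge, then sum the messages per vertex
    val inDegrees = graph.aggregateMessages[Int](ctx => ctx.sendToDst(1), _ + _)
    inDegrees.collect().foreach(println)   // e.g. (2, 2)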
aggregateMessages(Function1<EdgeContext<VD, ED, A>, BoxedUnit>, Function2<A, A, A>, TripletFields, ClassTag<A>) - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
aggregateMessages$default$3() - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
aggregateMessagesWithActiveSet(Function1<EdgeContext<VD, ED, A>, BoxedUnit>, Function2<A, A, A>, TripletFields, Option<Tuple2<VertexRDD<?>, EdgeDirection>>, ClassTag<A>) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
aggregateUsingIndex(RDD<Tuple2<Object, VD2>>, Function2<VD2, VD2, VD2>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
aggregateUsingIndex(RDD<Tuple2<Object, VD2>>, Function2<VD2, VD2, VD2>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.VertexRDD
Aggregates vertices in messages that have the same ids using reduceFunc, returning a VertexRDD co-indexed with this.
AggregatingEdgeContext<VD,ED,A> - Class in org.apache.spark.graphx.impl
 
AggregatingEdgeContext(Function2<A, A, A>, Object, BitSet) - Constructor for class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
Aggregator<K,V,C> - Class in org.apache.spark
:: DeveloperApi :: A set of functions used to aggregate data.
Aggregator(Function1<V, C>, Function2<C, V, C>, Function2<C, C, C>) - Constructor for class org.apache.spark.Aggregator
 
aggregator() - Method in class org.apache.spark.ShuffleDependency
 
Aggregator<IN,BUF,OUT> - Class in org.apache.spark.sql.expressions
A base class for user-defined aggregations, which can be used in Dataset operations to take all of the elements of a group and reduce them to a single value.
Aggregator() - Constructor for class org.apache.spark.sql.expressions.Aggregator
 
aggUntyped(Seq<TypedColumn<?, ?>>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Internal helper function for building typed aggregations that return tuples.
aic(RDD<Tuple3<Object, Object, Object>>, double, double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Binomial$
 
aic(RDD<Tuple3<Object, Object, Object>>, double, double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gamma$
 
aic(RDD<Tuple3<Object, Object, Object>>, double, double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gaussian$
 
aic(RDD<Tuple3<Object, Object, Object>>, double, double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Poisson$
 
aic() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
Akaike's "An Information Criterion" (AIC) for the fitted model.
Algo - Class in org.apache.spark.mllib.tree.configuration
:: Experimental :: Enum to select the algorithm for the decision tree.
Algo() - Constructor for class org.apache.spark.mllib.tree.configuration.Algo
 
algo() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
algo() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
 
algo() - Method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
algo() - Method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
algorithm() - Method in class org.apache.spark.mllib.classification.StreamingLogisticRegressionWithSGD
 
algorithm() - Method in class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
The algorithm to use for updating.
algorithm() - Method in class org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
 
alias(String) - Method in class org.apache.spark.sql.Column
Gives the column an alias.
alias(String) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with an alias set.
alias(Symbol) - Method in class org.apache.spark.sql.Dataset
(Scala-specific) Returns a new Dataset with an alias set.
All - Static variable in class org.apache.spark.graphx.TripletFields
Expose all the fields (source, edge, and destination).
allAttributes() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
AllJobsCancelled - Class in org.apache.spark.scheduler
 
AllJobsCancelled() - Constructor for class org.apache.spark.scheduler.AllJobsCancelled
 
AllReceiverIds - Class in org.apache.spark.streaming.scheduler
A message used by ReceiverTracker to query the ids of all receivers still stored in ReceiverTrackerEndpoint.
AllReceiverIds() - Constructor for class org.apache.spark.streaming.scheduler.AllReceiverIds
 
alpha() - Static method in class org.apache.spark.ml.recommendation.ALS
 
alpha() - Method in class org.apache.spark.mllib.random.WeibullGenerator
 
ALS - Class in org.apache.spark.ml.recommendation
:: Experimental :: Alternating Least Squares (ALS) matrix factorization.
ALS(String) - Constructor for class org.apache.spark.ml.recommendation.ALS
 
ALS() - Constructor for class org.apache.spark.ml.recommendation.ALS
 
ALS - Class in org.apache.spark.mllib.recommendation
Alternating Least Squares matrix factorization.
ALS() - Constructor for class org.apache.spark.mllib.recommendation.ALS
Constructs an ALS instance with default parameters: {numBlocks: -1, rank: 10, iterations: 10, lambda: 0.01, implicitPrefs: false, alpha: 1.0}.
ALS.InBlock$ - Class in org.apache.spark.ml.recommendation
 
ALS.InBlock$() - Constructor for class org.apache.spark.ml.recommendation.ALS.InBlock$
 
ALS.Rating<ID> - Class in org.apache.spark.ml.recommendation
:: DeveloperApi :: Rating class for better code readability.
ALS.Rating(ID, ID, float) - Constructor for class org.apache.spark.ml.recommendation.ALS.Rating
 
ALS.Rating$ - Class in org.apache.spark.ml.recommendation
 
ALS.Rating$() - Constructor for class org.apache.spark.ml.recommendation.ALS.Rating$
 
ALS.RatingBlock$ - Class in org.apache.spark.ml.recommendation
 
ALS.RatingBlock$() - Constructor for class org.apache.spark.ml.recommendation.ALS.RatingBlock$
 
ALSModel - Class in org.apache.spark.ml.recommendation
:: Experimental :: Model fitted by ALS.
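For illustration, a sketch of fitting the ml ALS estimator, assuming an existing SparkSession `spark`; the column names and ratings are placeholders:

    import org.apache.spark.ml.recommendation.ALS

    import spark.implicits._
    val ratings = Seq((0, 10, 4.0f), (0, 20, 1.0f), (1, 10, 5.0f))
      .toDF("user", "item", "rating")

    val als = new ALS()
      .setUserCol("user").setItemCol("item").setRatingCol("rating")
      .setRank(5).setMaxIter(5).setRegParam(0.1)

    val model = als.fit(ratings)        // returns an ALSModel
    model.transform(ratings).show()     // adds a "prediction" column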
am() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterClusterManager
 
AnalysisException - Exception in org.apache.spark.sql
:: DeveloperApi :: Thrown when a query fails to analyze, usually because the query itself is invalid.
AnalysisException(String, Option<Object>, Option<Object>, Option<LogicalPlan>) - Constructor for exception org.apache.spark.sql.AnalysisException
 
and(Column) - Method in class org.apache.spark.sql.Column
Boolean AND.
And - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff both left and right evaluate to true.
And(Filter, Filter) - Constructor for class org.apache.spark.sql.sources.And
 
andThen(Function1<B, C>) - Static method in class org.apache.spark.sql.types.StructType
 
antecedent() - Method in class org.apache.spark.mllib.fpm.AssociationRules.Rule
 
ANY() - Static method in class org.apache.spark.scheduler.TaskLocality
 
AnyDataType - Class in org.apache.spark.sql.types
An AbstractDataType that matches any concrete data type.
AnyDataType() - Constructor for class org.apache.spark.sql.types.AnyDataType
 
anyNull() - Method in interface org.apache.spark.sql.Row
Returns true if there are any NULL values in this row.
appAttemptId() - Method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
appendBias(Vector) - Static method in class org.apache.spark.mllib.util.MLUtils
Returns a new vector with 1.0 (bias) appended to the input vector.
appendColumn(StructType, String, DataType, boolean) - Static method in class org.apache.spark.ml.util.SchemaUtils
Appends a new column to the input schema.
appendColumn(StructType, StructField) - Static method in class org.apache.spark.ml.util.SchemaUtils
Appends a new column to the input schema.
appendReadColumns(Configuration, Seq<Integer>, Seq<String>) - Static method in class org.apache.spark.sql.hive.HiveShim
 
appHistoryInfoToPublicAppInfo(ApplicationHistoryInfo) - Static method in class org.apache.spark.status.api.v1.ApplicationsListResource
 
appId() - Method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
applicationAttemptId() - Method in class org.apache.spark.SparkContext
 
ApplicationAttemptInfo - Class in org.apache.spark.status.api.v1
 
applicationEndFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
applicationEndToJson(SparkListenerApplicationEnd) - Static method in class org.apache.spark.util.JsonProtocol
 
applicationId() - Method in class org.apache.spark.SparkContext
A unique identifier for the Spark application.
ApplicationInfo - Class in org.apache.spark.status.api.v1
 
ApplicationsListResource - Class in org.apache.spark.status.api.v1
 
ApplicationsListResource() - Constructor for class org.apache.spark.status.api.v1.ApplicationsListResource
 
applicationStartFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
applicationStartToJson(SparkListenerApplicationStart) - Static method in class org.apache.spark.util.JsonProtocol
 
ApplicationStatus - Enum in org.apache.spark.status.api.v1
 
apply(RDD<Tuple2<Object, VD>>, RDD<Edge<ED>>, VD, StorageLevel, StorageLevel, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.Graph
Construct a graph from a collection of vertices and edges with attributes.
apply(RDD<Edge<ED>>, VD, StorageLevel, StorageLevel, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.impl.GraphImpl
Create a graph from edges, setting referenced vertices to `defaultVertexAttr`.
apply(RDD<Tuple2<Object, VD>>, RDD<Edge<ED>>, VD, StorageLevel, StorageLevel, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.impl.GraphImpl
Create a graph from vertices and edges, setting missing vertices to `defaultVertexAttr`.
apply(VertexRDD<VD>, EdgeRDD<ED>, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.impl.GraphImpl
Create a graph from a VertexRDD and an EdgeRDD with arbitrary replicated vertices.
apply(Graph<VD, ED>, A, int, EdgeDirection, Function3<Object, VD, A, VD>, Function1<EdgeTriplet<VD, ED>, Iterator<Tuple2<Object, A>>>, Function2<A, A, A>, ClassTag<VD>, ClassTag<ED>, ClassTag<A>) - Static method in class org.apache.spark.graphx.Pregel
Execute a Pregel-like iterative vertex-parallel abstraction.
apply(RDD<Tuple2<Object, VD>>, ClassTag<VD>) - Static method in class org.apache.spark.graphx.VertexRDD
Constructs a standalone VertexRDD (one that is not set up for efficient joins with an EdgeRDD) from an RDD of vertex-attribute pairs.
apply(RDD<Tuple2<Object, VD>>, EdgeRDD<?>, VD, ClassTag<VD>) - Static method in class org.apache.spark.graphx.VertexRDD
Constructs a VertexRDD from an RDD of vertex-attribute pairs.
apply(RDD<Tuple2<Object, VD>>, EdgeRDD<?>, VD, Function2<VD, VD, VD>, ClassTag<VD>) - Static method in class org.apache.spark.graphx.VertexRDD
Constructs a VertexRDD from an RDD of vertex-attribute pairs.
apply(DenseMatrix<Object>, DenseMatrix<Object>, Function1<Object, Object>) - Static method in class org.apache.spark.ml.ann.ApplyInPlace
 
apply(DenseMatrix<Object>, DenseMatrix<Object>, DenseMatrix<Object>, Function2<Object, Object, Object>) - Static method in class org.apache.spark.ml.ann.ApplyInPlace
 
apply(String) - Method in class org.apache.spark.ml.attribute.AttributeGroup
Gets an attribute by its name.
apply(int) - Method in class org.apache.spark.ml.attribute.AttributeGroup
Gets an attribute by its index.
apply(int, int) - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
apply(int) - Method in class org.apache.spark.ml.linalg.DenseVector
 
apply(int, int) - Method in interface org.apache.spark.ml.linalg.Matrix
Gets the (i, j)-th element.
apply(int, int) - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
apply(int) - Static method in class org.apache.spark.ml.linalg.SparseVector
 
apply(int) - Method in interface org.apache.spark.ml.linalg.Vector
Gets the value of the ith element.
apply(Param<T>) - Method in class org.apache.spark.ml.param.ParamMap
Gets the value of the input param or its default value if it does not exist.
apply(Split) - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.SplitData$
 
apply(BinaryConfusionMatrix) - Static method in class org.apache.spark.mllib.evaluation.binary.FalsePositiveRate
 
apply(BinaryConfusionMatrix) - Static method in class org.apache.spark.mllib.evaluation.binary.Precision
 
apply(BinaryConfusionMatrix) - Static method in class org.apache.spark.mllib.evaluation.binary.Recall
 
apply(int, int) - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
apply(int) - Method in class org.apache.spark.mllib.linalg.DenseVector
 
apply(int, int) - Method in interface org.apache.spark.mllib.linalg.Matrix
Gets the (i, j)-th element.
apply(int, int) - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
apply(int) - Static method in class org.apache.spark.mllib.linalg.SparseVector
 
apply(int) - Method in interface org.apache.spark.mllib.linalg.Vector
Gets the value of the ith element.
apply(int) - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
apply(int) - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
apply(int) - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
apply(int) - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
apply(int, Predict, double, boolean) - Static method in class org.apache.spark.mllib.tree.model.Node
Construct a node with nodeIndex, predict, impurity and isLeaf parameters.
apply(int) - Static method in class org.apache.spark.rdd.CheckpointState
 
apply(long, String, Option<String>, String, boolean) - Static method in class org.apache.spark.scheduler.AccumulableInfo
Deprecated.
do not create AccumulableInfo. Since 2.0.0.
apply(long, String, Option<String>, String) - Static method in class org.apache.spark.scheduler.AccumulableInfo
Deprecated.
do not create AccumulableInfo. Since 2.0.0.
apply(long, String, String) - Static method in class org.apache.spark.scheduler.AccumulableInfo
Deprecated.
do not create AccumulableInfo. Since 2.0.0.
apply(String, long, Enumeration.Value, ByteBuffer) - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StatusUpdate$
Alternate factory method that takes a ByteBuffer directly for the data field.
apply(long, TaskMetrics) - Static method in class org.apache.spark.scheduler.RuntimePercentage
 
apply(int) - Static method in class org.apache.spark.scheduler.SchedulingMode
 
apply(int) - Static method in class org.apache.spark.scheduler.TaskLocality
 
apply(Object) - Method in class org.apache.spark.sql.Column
Extracts a value or values from a complex type.
apply(String) - Method in class org.apache.spark.sql.Dataset
Selects a column based on the column name and returns it as a Column.
apply(Column...) - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
Creates a Column for this UDAF using given Columns as input arguments.
apply(Seq<Column>) - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
Creates a Column for this UDAF using given Columns as input arguments.
apply(Seq<Column>) - Method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
apply(int) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
apply(String) - Static method in class org.apache.spark.sql.ProcessingTime
Create a ProcessingTime.
apply(Duration) - Static method in class org.apache.spark.sql.ProcessingTime
Create a ProcessingTime.
apply(Dataset<Row>, Seq<Expression>, RelationalGroupedDataset.GroupType) - Static method in class org.apache.spark.sql.RelationalGroupedDataset
 
apply(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i.
apply(DataType) - Static method in class org.apache.spark.sql.types.ArrayType
Construct an ArrayType object with the given element type.
apply(double) - Static method in class org.apache.spark.sql.types.Decimal
 
apply(long) - Static method in class org.apache.spark.sql.types.Decimal
 
apply(int) - Static method in class org.apache.spark.sql.types.Decimal
 
apply(BigDecimal) - Static method in class org.apache.spark.sql.types.Decimal
 
apply(BigDecimal) - Static method in class org.apache.spark.sql.types.Decimal
 
apply(BigDecimal, int, int) - Static method in class org.apache.spark.sql.types.Decimal
 
apply(BigDecimal, int, int) - Static method in class org.apache.spark.sql.types.Decimal
 
apply(long, int, int) - Static method in class org.apache.spark.sql.types.Decimal
 
apply(String) - Static method in class org.apache.spark.sql.types.Decimal
 
apply(DataType, DataType) - Static method in class org.apache.spark.sql.types.MapType
Construct a MapType object with the given key type and value type.
apply(String) - Method in class org.apache.spark.sql.types.StructType
Extracts the StructField with the given name.
apply(Set<String>) - Method in class org.apache.spark.sql.types.StructType
Returns a StructType containing StructFields of the given names, preserving the original order of fields.
apply(int) - Method in class org.apache.spark.sql.types.StructType
 
apply(String) - Static method in class org.apache.spark.storage.BlockId
Converts a BlockId "name" String back into a BlockId.
apply(String, String, int) - Static method in class org.apache.spark.storage.BlockManagerId
Returns a BlockManagerId for the given configuration.
apply(ObjectInput) - Static method in class org.apache.spark.storage.BlockManagerId
 
apply(boolean, boolean, boolean, boolean, int) - Static method in class org.apache.spark.storage.StorageLevel
:: DeveloperApi :: Create a new StorageLevel object.
apply(boolean, boolean, boolean, int) - Static method in class org.apache.spark.storage.StorageLevel
:: DeveloperApi :: Create a new StorageLevel object without setting useOffHeap.
apply(int, int) - Static method in class org.apache.spark.storage.StorageLevel
:: DeveloperApi :: Create a new StorageLevel object from its integer representation.
apply(ObjectInput) - Static method in class org.apache.spark.storage.StorageLevel
:: DeveloperApi :: Read StorageLevel object from ObjectInput stream.
apply(String, int) - Static method in class org.apache.spark.streaming.kafka.Broker
 
apply(Map<String, String>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster.SimpleConsumerConfig$
Make a consumer config without requiring group.id or zookeeper.connect, since communicating with brokers also needs common settings such as timeouts.
apply(String, int, long, long) - Static method in class org.apache.spark.streaming.kafka.OffsetRange
 
apply(TopicAndPartition, long, long) - Static method in class org.apache.spark.streaming.kafka.OffsetRange
 
apply(long) - Static method in class org.apache.spark.streaming.Milliseconds
 
apply(long) - Static method in class org.apache.spark.streaming.Minutes
 
apply(int) - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
apply(long) - Static method in class org.apache.spark.streaming.Seconds
 
apply(int) - Static method in class org.apache.spark.TaskState
 
apply(TraversableOnce<Object>) - Static method in class org.apache.spark.util.StatCounter
Build a StatCounter from a list of values.
apply(Seq<Object>) - Static method in class org.apache.spark.util.StatCounter
Build a StatCounter from a list of values passed as variable-length arguments.
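A small sketch of building a StatCounter directly from values (the numbers below are arbitrary):

    import org.apache.spark.util.StatCounter

    // Summarize a handful of doubles; mean, stdev, min and max are then available.
    val stats = StatCounter(Seq(1.0, 2.0, 3.0, 4.0))
    println(s"mean=${stats.mean} stdev=${stats.stdev} max=${stats.max}")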
ApplyInPlace - Class in org.apache.spark.ml.ann
Implements in-place application of functions to arrays.
ApplyInPlace() - Constructor for class org.apache.spark.ml.ann.ApplyInPlace
 
applyOrElse(A1, Function1<A1, B1>) - Static method in class org.apache.spark.sql.types.StructType
 
applySchemaToPythonRDD(RDD<Object[]>, String) - Method in class org.apache.spark.sql.SparkSession
Apply a schema defined by the schemaString to an RDD.
applySchemaToPythonRDD(RDD<Object[]>, StructType) - Method in class org.apache.spark.sql.SparkSession
Apply a schema defined by the schema to an RDD.
applySchemaToPythonRDD(RDD<Object[]>, String) - Method in class org.apache.spark.sql.SQLContext
Apply a schema defined by the schemaString to an RDD.
applySchemaToPythonRDD(RDD<Object[]>, StructType) - Method in class org.apache.spark.sql.SQLContext
Apply a schema defined by the schema to an RDD.
appName() - Method in class org.apache.spark.api.java.JavaSparkContext
 
appName() - Method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
appName() - Method in class org.apache.spark.SparkContext
 
appName(String) - Method in class org.apache.spark.sql.SparkSession.Builder
Sets a name for the application, which will be shown in the Spark web UI.
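For orientation, a minimal sketch of setting the application name while building a session (the name and master URL below are illustrative):

    import org.apache.spark.sql.SparkSession

    // Build (or reuse) a session; the master URL and application name are placeholders.
    val spark = SparkSession.builder()
      .appName("IndexExample")
      .master("local[*]")
      .getOrCreate()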
approxCountDistinct(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the approximate number of distinct items in a group.
approxCountDistinct(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the approximate number of distinct items in a group.
approxCountDistinct(Column, double) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the approximate number of distinct items in a group.
approxCountDistinct(String, double) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the approximate number of distinct items in a group.
ApproxHist() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
approxQuantile(String, double[], double) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Calculates the approximate quantiles of a numerical column of a DataFrame.
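As a rough illustration of the approximate aggregates above (the DataFrame df and the column name "value" are assumptions):

    import org.apache.spark.sql.functions.approxCountDistinct

    // Estimated distinct count with a 5% maximum relative error.
    val distinctEstimate = df.agg(approxCountDistinct("value", 0.05))
    // Approximate quartiles of the same column with 1% relative error.
    val quartiles = df.stat.approxQuantile("value", Array(0.25, 0.5, 0.75), 0.01)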
AreaUnderCurve - Class in org.apache.spark.mllib.evaluation
Computes the area under the curve (AUC) using the trapezoidal rule.
AreaUnderCurve() - Constructor for class org.apache.spark.mllib.evaluation.AreaUnderCurve
 
areaUnderPR() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Computes the area under the precision-recall curve.
areaUnderROC() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
Computes the area under the receiver operating characteristic (ROC) curve.
areaUnderROC() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Computes the area under the receiver operating characteristic (ROC) curve.
argmax() - Method in class org.apache.spark.ml.linalg.DenseVector
 
argmax() - Method in class org.apache.spark.ml.linalg.SparseVector
 
argmax() - Method in interface org.apache.spark.ml.linalg.Vector
Find the index of a maximal element.
argmax() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
argmax() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
argmax() - Method in interface org.apache.spark.mllib.linalg.Vector
Find the index of a maximal element.
argString() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
array(DataType) - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type array.
array(Column...) - Static method in class org.apache.spark.sql.functions
Creates a new array column.
array(String, String...) - Static method in class org.apache.spark.sql.functions
Creates a new array column.
array(Seq<Column>) - Static method in class org.apache.spark.sql.functions
Creates a new array column.
array(String, Seq<String>) - Static method in class org.apache.spark.sql.functions
Creates a new array column.
array_contains(Column, Object) - Static method in class org.apache.spark.sql.functions
Returns true if the array contains the given value.
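A brief sketch, assuming a DataFrame df with string columns a, b, and c:

    import org.apache.spark.sql.functions.{array, array_contains, col}

    // Collect three columns into a single array column, then test membership.
    val withArr = df.withColumn("arr", array(col("a"), col("b"), col("c")))
    val hits = withArr.filter(array_contains(col("arr"), "spark"))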
arrayLengthGt(double) - Static method in class org.apache.spark.ml.param.ParamValidators
Check that the array length is greater than lowerBound.
ArrayType - Class in org.apache.spark.sql.types
 
ArrayType(DataType, boolean) - Constructor for class org.apache.spark.sql.types.ArrayType
 
ArrayType() - Constructor for class org.apache.spark.sql.types.ArrayType
No-arg constructor for kryo.
as(Encoder<U>) - Method in class org.apache.spark.sql.Column
Provides a type hint about the expected return value of this column.
as(String) - Method in class org.apache.spark.sql.Column
Gives the column an alias.
as(Seq<String>) - Method in class org.apache.spark.sql.Column
(Scala-specific) Assigns the given aliases to the results of a table generating function.
as(String[]) - Method in class org.apache.spark.sql.Column
Assigns the given aliases to the results of a table generating function.
as(Symbol) - Method in class org.apache.spark.sql.Column
Gives the column an alias.
as(String, Metadata) - Method in class org.apache.spark.sql.Column
Gives the column an alias with metadata.
as(Encoder<U>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: Returns a new Dataset where each record has been mapped on to the specified type.
as(String) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with an alias set.
as(Symbol) - Method in class org.apache.spark.sql.Dataset
(Scala-specific) Returns a new Dataset with an alias set.
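To illustrate the two flavours of as (column aliasing and typed Datasets), a sketch in which df, spark, and the Person case class are assumptions:

    // Person is a hypothetical case class whose fields match df's columns.
    case class Person(name: String, age: Long)

    import org.apache.spark.sql.functions.col
    import spark.implicits._

    // Column alias: rename an expression in the output.
    val renamed = df.select(col("price").as("unit_price"))
    // Typed view of the same rows.
    val people = df.as[Person]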
asc() - Method in class org.apache.spark.sql.Column
Returns an ordering used in sorting.
asc(String) - Static method in class org.apache.spark.sql.functions
Returns a sort expression based on ascending order of the column.
ascii(Column) - Static method in class org.apache.spark.sql.functions
Computes the numeric value of the first character of the string column, and returns the result as an int column.
asCode() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
asin(Column) - Static method in class org.apache.spark.sql.functions
Computes the sine inverse of the given value; the returned angle is in the range -pi/2 through pi/2.
asin(String) - Static method in class org.apache.spark.sql.functions
Computes the sine inverse of the given column; the returned angle is in the range -pi/2 through pi/2.
asIterator() - Method in class org.apache.spark.serializer.DeserializationStream
Read the elements of this stream through an iterator.
asJavaPairRDD() - Method in class org.apache.spark.api.r.PairwiseRRDD
 
asJavaRDD() - Method in class org.apache.spark.api.r.RRDD
 
asJavaRDD() - Method in class org.apache.spark.api.r.StringRRDD
 
asKeyValueIterator() - Method in class org.apache.spark.serializer.DeserializationStream
Read the elements of this stream through an iterator over key-value pairs.
AskPermissionToCommitOutput - Class in org.apache.spark.scheduler
 
AskPermissionToCommitOutput(int, int, int) - Constructor for class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
askRpcTimeout(SparkConf) - Static method in class org.apache.spark.util.RpcUtils
Returns the default Spark timeout to use for RPC ask operations.
askSlaves() - Method in class org.apache.spark.storage.BlockManagerMessages.GetBlockStatus
 
askSlaves() - Method in class org.apache.spark.storage.BlockManagerMessages.GetMatchingBlockIds
 
asML() - Method in interface org.apache.spark.mllib.linalg.Matrix
Convert this matrix to the new mllib-local representation.
asML() - Method in interface org.apache.spark.mllib.linalg.Vector
Convert this vector to the new mllib-local representation.
asRDDId() - Method in class org.apache.spark.storage.BlockId
 
asRDDId() - Static method in class org.apache.spark.storage.BroadcastBlockId
 
asRDDId() - Static method in class org.apache.spark.storage.RDDBlockId
 
asRDDId() - Static method in class org.apache.spark.storage.ShuffleBlockId
 
asRDDId() - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
asRDDId() - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
asRDDId() - Static method in class org.apache.spark.storage.StreamBlockId
 
asRDDId() - Static method in class org.apache.spark.storage.TaskResultBlockId
 
assertNotSpilled(SparkContext, String, Function0<T>) - Static method in class org.apache.spark.TestUtils
Run some code involving jobs submitted to the given context and assert that the jobs did not spill.
assertSpilled(SparkContext, String, Function0<T>) - Static method in class org.apache.spark.TestUtils
Run some code involving jobs submitted to the given context and assert that the jobs spilled.
assertValid() - Method in class org.apache.spark.broadcast.Broadcast
Check if this broadcast is valid.
assignments() - Method in class org.apache.spark.mllib.clustering.PowerIterationClusteringModel
 
AssociationRules - Class in org.apache.spark.mllib.fpm
:: Experimental ::
AssociationRules() - Constructor for class org.apache.spark.mllib.fpm.AssociationRules
Constructs a default instance with default parameters {minConfidence = 0.8}.
AssociationRules.Rule<Item> - Class in org.apache.spark.mllib.fpm
:: Experimental ::
AsyncRDDActions<T> - Class in org.apache.spark.rdd
A set of asynchronous RDD actions available through an implicit conversion.
AsyncRDDActions(RDD<T>, ClassTag<T>) - Constructor for class org.apache.spark.rdd.AsyncRDDActions
 
atan(Column) - Static method in class org.apache.spark.sql.functions
Computes the tangent inverse of the given value.
atan(String) - Static method in class org.apache.spark.sql.functions
Computes the tangent inverse of the given column.
atan2(Column, Column) - Static method in class org.apache.spark.sql.functions
Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).
atan2(Column, String) - Static method in class org.apache.spark.sql.functions
Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).
atan2(String, Column) - Static method in class org.apache.spark.sql.functions
Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).
atan2(String, String) - Static method in class org.apache.spark.sql.functions
Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).
atan2(Column, double) - Static method in class org.apache.spark.sql.functions
Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).
atan2(String, double) - Static method in class org.apache.spark.sql.functions
Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).
atan2(double, Column) - Static method in class org.apache.spark.sql.functions
Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).
atan2(double, String) - Static method in class org.apache.spark.sql.functions
Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).
attempt() - Method in class org.apache.spark.status.api.v1.TaskData
 
attemptId() - Method in class org.apache.spark.scheduler.StageInfo
 
attemptId() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
attemptId() - Method in class org.apache.spark.status.api.v1.StageData
 
attemptNumber() - Method in class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
attemptNumber() - Method in class org.apache.spark.scheduler.TaskInfo
 
attemptNumber() - Method in class org.apache.spark.TaskCommitDenied
 
attemptNumber() - Method in class org.apache.spark.TaskContext
How many times this task has been attempted.
attempts() - Method in class org.apache.spark.status.api.v1.ApplicationInfo
 
attr() - Method in class org.apache.spark.graphx.Edge
 
attr() - Method in class org.apache.spark.graphx.EdgeContext
The attribute associated with the edge.
attr() - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
Attribute - Class in org.apache.spark.ml.attribute
:: DeveloperApi :: Abstract class for ML attributes.
Attribute() - Constructor for class org.apache.spark.ml.attribute.Attribute
 
attribute() - Method in class org.apache.spark.sql.sources.EqualNullSafe
 
attribute() - Method in class org.apache.spark.sql.sources.EqualTo
 
attribute() - Method in class org.apache.spark.sql.sources.GreaterThan
 
attribute() - Method in class org.apache.spark.sql.sources.GreaterThanOrEqual
 
attribute() - Method in class org.apache.spark.sql.sources.In
 
attribute() - Method in class org.apache.spark.sql.sources.IsNotNull
 
attribute() - Method in class org.apache.spark.sql.sources.IsNull
 
attribute() - Method in class org.apache.spark.sql.sources.LessThan
 
attribute() - Method in class org.apache.spark.sql.sources.LessThanOrEqual
 
attribute() - Method in class org.apache.spark.sql.sources.StringContains
 
attribute() - Method in class org.apache.spark.sql.sources.StringEndsWith
 
attribute() - Method in class org.apache.spark.sql.sources.StringStartsWith
 
AttributeGroup - Class in org.apache.spark.ml.attribute
:: DeveloperApi :: Attributes that describe a vector ML column.
AttributeGroup(String) - Constructor for class org.apache.spark.ml.attribute.AttributeGroup
Creates an attribute group without attribute info.
AttributeGroup(String, int) - Constructor for class org.apache.spark.ml.attribute.AttributeGroup
Creates an attribute group knowing only the number of attributes.
AttributeGroup(String, Attribute[]) - Constructor for class org.apache.spark.ml.attribute.AttributeGroup
Creates an attribute group with attributes.
AttributeKeys - Class in org.apache.spark.ml.attribute
Keys used to store attributes.
AttributeKeys() - Constructor for class org.apache.spark.ml.attribute.AttributeKeys
 
attributes() - Method in class org.apache.spark.ml.attribute.AttributeGroup
Optional array of attributes.
ATTRIBUTES() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
AttributeType - Class in org.apache.spark.ml.attribute
:: DeveloperApi :: An enum-like type for attribute types: AttributeType$.Numeric, AttributeType$.Nominal, and AttributeType$.Binary.
AttributeType(String) - Constructor for class org.apache.spark.ml.attribute.AttributeType
 
attrType() - Method in class org.apache.spark.ml.attribute.Attribute
Attribute type.
attrType() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
attrType() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
attrType() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
attrType() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
available() - Method in class org.apache.spark.io.LZ4BlockInputStream
 
available() - Method in class org.apache.spark.storage.BufferReleasingInputStream
 
Average() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
avg(MapFunction<T, Double>) - Static method in class org.apache.spark.sql.expressions.javalang.typed
Average aggregate function.
avg(Function1<IN, Object>) - Static method in class org.apache.spark.sql.expressions.scalalang.typed
Average aggregate function.
avg(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the average of the values in a group.
avg(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the average of the values in a group.
avg(String...) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the mean value for each numeric column for each group.
avg(Seq<String>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the mean value for each numeric column for each group.
avg() - Method in class org.apache.spark.util.DoubleAccumulator
Returns the average of elements added to the accumulator.
avg() - Method in class org.apache.spark.util.LongAccumulator
Returns the average of elements added to the accumulator.
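A short sketch of the untyped average aggregate (df and the column names are assumptions):

    import org.apache.spark.sql.functions.avg

    // Average salary per department.
    val perDept = df.groupBy("dept").agg(avg("salary"))
    // Shortcut that averages every numeric column per group.
    val allAvgs = df.groupBy("dept").avg()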
avgMetrics() - Method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
awaitAnyTermination() - Method in class org.apache.spark.sql.ContinuousQueryManager
Wait until any of the queries on the associated SQLContext has terminated since the creation of the context, or since resetTerminated() was called.
awaitAnyTermination(long) - Method in class org.apache.spark.sql.ContinuousQueryManager
Wait until any of the queries on the associated SQLContext has terminated since the creation of the context, or since resetTerminated() was called.
awaitResult(Awaitable<T>, Duration) - Static method in class org.apache.spark.util.ThreadUtils
Preferred alternative to Await.result().
awaitTermination() - Method in interface org.apache.spark.sql.ContinuousQuery
Waits for the termination of this query, either by query.stop() or by an exception.
awaitTermination(long) - Method in interface org.apache.spark.sql.ContinuousQuery
Waits for the termination of this query, either by query.stop() or by an exception.
awaitTermination() - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Wait for the execution to stop.
awaitTermination() - Method in class org.apache.spark.streaming.StreamingContext
Wait for the execution to stop.
awaitTerminationOrTimeout(long) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Wait for the execution to stop.
awaitTerminationOrTimeout(long) - Method in class org.apache.spark.streaming.StreamingContext
Wait for the execution to stop.
axpy(double, Vector, Vector) - Static method in class org.apache.spark.ml.linalg.BLAS
y += a * x
axpy(double, Vector, Vector) - Static method in class org.apache.spark.mllib.linalg.BLAS
y += a * x

B

BACKUP_STANDALONE_MASTER_PREFIX() - Static method in class org.apache.spark.util.Utils
An identifier that backup masters use in their responses.
balanceSlack() - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
base64(Column) - Static method in class org.apache.spark.sql.functions
Computes the BASE64 encoding of a binary column and returns it as a string column.
baseOn(ParamPair<?>...) - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Sets the given parameters in this grid to fixed values.
baseOn(ParamMap) - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Sets the given parameters in this grid to fixed values.
baseOn(Seq<ParamPair<?>>) - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Sets the given parameters in this grid to fixed values.
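To sketch how baseOn combines with the rest of the builder (a LogisticRegression estimator is used purely as an example):

    import org.apache.spark.ml.classification.LogisticRegression
    import org.apache.spark.ml.tuning.ParamGridBuilder

    val lr = new LogisticRegression()
    // Fix maxIter for every combination, then vary regParam over two values.
    val grid = new ParamGridBuilder()
      .baseOn(lr.maxIter -> 20)
      .addGrid(lr.regParam, Array(0.1, 0.01))
      .build()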
BaseRelation - Class in org.apache.spark.sql.sources
:: DeveloperApi :: Represents a collection of tuples with a known schema.
BaseRelation() - Constructor for class org.apache.spark.sql.sources.BaseRelation
 
baseRelationToDataFrame(BaseRelation) - Method in class org.apache.spark.sql.SparkSession
Convert a BaseRelation created for external data sources into a DataFrame.
baseRelationToDataFrame(BaseRelation) - Method in class org.apache.spark.sql.SQLContext
Convert a BaseRelation created for external data sources into a DataFrame.
BaseRRDD<T,U> - Class in org.apache.spark.api.r
 
BaseRRDD(RDD<T>, int, byte[], String, String, byte[], Broadcast<Object>[], ClassTag<T>, ClassTag<U>) - Constructor for class org.apache.spark.api.r.BaseRRDD
 
baseScope() - Method in class org.apache.spark.streaming.dstream.DStream
The base scope associated with the operation that created this DStream.
baseScope() - Method in class org.apache.spark.streaming.dstream.InputDStream
The base scope associated with the operation that created this DStream.
basicSparkPage(Function0<Seq<Node>>, String, boolean) - Static method in class org.apache.spark.ui.UIUtils
Returns a page with the Spark CSS/JS and a simple format.
BATCHES() - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
BatchInfo - Class in org.apache.spark.streaming.scheduler
:: DeveloperApi :: Class having information on completed batches.
BatchInfo(Time, Map<Object, StreamInputInfo>, long, Option<Object>, Option<Object>, Map<Object, OutputOperationInfo>) - Constructor for class org.apache.spark.streaming.scheduler.BatchInfo
 
batchInfo() - Method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchCompleted
 
batchInfo() - Method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchStarted
 
batchInfo() - Method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
 
batchInfos() - Method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
batchTime() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
 
batchTime() - Method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
bean(Class<T>) - Static method in class org.apache.spark.sql.Encoders
Creates an encoder for a Java Bean of type T.
beforeFetch(Connection, Map<String, String>) - Static method in class org.apache.spark.sql.jdbc.DB2Dialect
 
beforeFetch(Connection, Map<String, String>) - Static method in class org.apache.spark.sql.jdbc.DerbyDialect
 
beforeFetch(Connection, Map<String, String>) - Method in class org.apache.spark.sql.jdbc.JdbcDialect
Override connection-specific properties to run before a select is made.
beforeFetch(Connection, Map<String, String>) - Static method in class org.apache.spark.sql.jdbc.MsSqlServerDialect
 
beforeFetch(Connection, Map<String, String>) - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
beforeFetch(Connection, Map<String, String>) - Static method in class org.apache.spark.sql.jdbc.NoopDialect
 
beforeFetch(Connection, Map<String, String>) - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
beforeFetch(Connection, Map<String, String>) - Static method in class org.apache.spark.sql.jdbc.PostgresDialect
 
BernoulliCellSampler<T> - Class in org.apache.spark.util.random
:: DeveloperApi :: A sampler based on Bernoulli trials for partitioning a data sequence.
BernoulliCellSampler(double, double, boolean) - Constructor for class org.apache.spark.util.random.BernoulliCellSampler
 
BernoulliSampler<T> - Class in org.apache.spark.util.random
:: DeveloperApi :: A sampler based on Bernoulli trials.
BernoulliSampler(double, ClassTag<T>) - Constructor for class org.apache.spark.util.random.BernoulliSampler
 
bestModel() - Method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
bestModel() - Method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
beta() - Method in class org.apache.spark.mllib.random.WeibullGenerator
 
between(Object, Object) - Method in class org.apache.spark.sql.Column
True if the current column is between the lower bound and upper bound, inclusive.
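For example (df and the age column are assumptions):

    import org.apache.spark.sql.functions.col

    // Keep rows whose age lies in the inclusive range [18, 65].
    val adults = df.filter(col("age").between(18, 65))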
bin(Column) - Static method in class org.apache.spark.sql.functions
An expression that returns the string representation of the binary value of the given long column.
bin(String) - Static method in class org.apache.spark.sql.functions
An expression that returns the string representation of the binary value of the given long column.
Binarizer - Class in org.apache.spark.ml.feature
:: Experimental :: Binarize a column of continuous features given a threshold.
Binarizer(String) - Constructor for class org.apache.spark.ml.feature.Binarizer
 
Binarizer() - Constructor for class org.apache.spark.ml.feature.Binarizer
 
Binary() - Static method in class org.apache.spark.ml.attribute.AttributeType
Binary type.
binary() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
binary() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
binary() - Method in class org.apache.spark.ml.feature.HashingTF
Binary toggle to control term frequency counts.
binary() - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type binary.
BINARY() - Static method in class org.apache.spark.sql.Encoders
An encoder for arrays of bytes.
BinaryAttribute - Class in org.apache.spark.ml.attribute
:: DeveloperApi :: A binary attribute.
BinaryClassificationEvaluator - Class in org.apache.spark.ml.evaluation
:: Experimental :: Evaluator for binary classification, which expects two input columns: rawPrediction and label.
BinaryClassificationEvaluator(String) - Constructor for class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
BinaryClassificationEvaluator() - Constructor for class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
BinaryClassificationMetrics - Class in org.apache.spark.mllib.evaluation
Evaluator for binary classification.
BinaryClassificationMetrics(RDD<Tuple2<Object, Object>>, int) - Constructor for class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
 
BinaryClassificationMetrics(RDD<Tuple2<Object, Object>>) - Constructor for class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Defaults numBins to 0.
binaryFiles(String, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Read a directory of binary files from HDFS, a local file system (available on all nodes), or any Hadoop-supported file system URI as a byte array.
binaryFiles(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Read a directory of binary files from HDFS, a local file system (available on all nodes), or any Hadoop-supported file system URI as a byte array.
binaryFiles(String, int) - Method in class org.apache.spark.SparkContext
Get an RDD for a Hadoop-readable dataset as PortableDataStream for each file (useful for binary data)
binaryLabelValidator() - Static method in class org.apache.spark.mllib.util.DataValidators
Function to check if labels used for classification are either zero or one.
BinaryLogisticRegressionSummary - Class in org.apache.spark.ml.classification
:: Experimental :: Binary Logistic regression results for a given model.
BinaryLogisticRegressionTrainingSummary - Class in org.apache.spark.ml.classification
:: Experimental :: Logistic regression training results.
binaryRecords(String, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Load data from a flat binary file, assuming the length of each record is constant.
binaryRecords(String, int, Configuration) - Method in class org.apache.spark.SparkContext
Load data from a flat binary file, assuming the length of each record is constant.
binaryRecordsStream(String, int) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them as flat binary files with fixed record lengths, yielding byte arrays.
binaryRecordsStream(String, int) - Method in class org.apache.spark.streaming.StreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them as flat binary files, assuming a fixed length per record, generating one byte array per record.
BinarySample - Class in org.apache.spark.mllib.stat.test
Class that represents the group and value of a sample.
BinarySample(boolean, double) - Constructor for class org.apache.spark.mllib.stat.test.BinarySample
 
BinaryType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing Array[Byte] values.
BinaryType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the BinaryType object.
BinomialBounds - Class in org.apache.spark.util.random
Utility functions that help us determine bounds on adjusted sampling rate to guarantee exact sample size with high confidence when sampling without replacement.
BinomialBounds() - Constructor for class org.apache.spark.util.random.BinomialBounds
 
BisectingKMeans - Class in org.apache.spark.ml.clustering
:: Experimental ::
BisectingKMeans(String) - Constructor for class org.apache.spark.ml.clustering.BisectingKMeans
 
BisectingKMeans() - Constructor for class org.apache.spark.ml.clustering.BisectingKMeans
 
BisectingKMeans - Class in org.apache.spark.mllib.clustering
A bisecting k-means algorithm based on the paper "A comparison of document clustering techniques" by Steinbach, Karypis, and Kumar, with modification to fit Spark.
BisectingKMeans() - Constructor for class org.apache.spark.mllib.clustering.BisectingKMeans
Constructs a BisectingKMeans with the default configuration.
BisectingKMeansModel - Class in org.apache.spark.ml.clustering
:: Experimental :: Model fitted by BisectingKMeans.
BisectingKMeansModel - Class in org.apache.spark.mllib.clustering
Clustering model produced by BisectingKMeans.
BisectingKMeansModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.clustering
 
BisectingKMeansModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.clustering.BisectingKMeansModel.SaveLoadV1_0$
 
bitSize() - Method in class org.apache.spark.util.sketch.BloomFilter
Returns the number of bits in the underlying bit array.
bitwiseAND(Object) - Method in class org.apache.spark.sql.Column
Compute bitwise AND of this expression with another expression.
bitwiseNOT(Column) - Static method in class org.apache.spark.sql.functions
Computes bitwise NOT.
bitwiseOR(Object) - Method in class org.apache.spark.sql.Column
Compute bitwise OR of this expression with another expression.
bitwiseXOR(Object) - Method in class org.apache.spark.sql.Column
Compute bitwise XOR of this expression with another expression.
BLAS - Class in org.apache.spark.ml.linalg
BLAS routines for MLlib's vectors and matrices.
BLAS() - Constructor for class org.apache.spark.ml.linalg.BLAS
 
BLAS - Class in org.apache.spark.mllib.linalg
BLAS routines for MLlib's vectors and matrices.
BLAS() - Constructor for class org.apache.spark.mllib.linalg.BLAS
 
BlockId - Class in org.apache.spark.storage
:: DeveloperApi :: Identifies a particular Block of data, usually associated with a single file.
BlockId() - Constructor for class org.apache.spark.storage.BlockId
 
blockId() - Method in class org.apache.spark.storage.BlockManagerMessages.GetBlockStatus
 
blockId() - Method in class org.apache.spark.storage.BlockManagerMessages.GetLocations
 
blockId() - Method in class org.apache.spark.storage.BlockManagerMessages.RemoveBlock
 
blockId() - Method in class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
 
blockId() - Method in class org.apache.spark.storage.BlockUpdatedInfo
 
blockIds() - Method in class org.apache.spark.storage.BlockManagerMessages.GetLocationsMultipleBlockIds
 
blockManager() - Method in class org.apache.spark.SparkEnv
 
blockManagerAddedFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
blockManagerAddedToJson(SparkListenerBlockManagerAdded) - Static method in class org.apache.spark.util.JsonProtocol
 
blockManagerId() - Method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
blockManagerId() - Method in class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
BlockManagerId - Class in org.apache.spark.storage
:: DeveloperApi :: This class represents a unique identifier for a BlockManager.
blockManagerId() - Method in class org.apache.spark.storage.BlockManagerMessages.BlockManagerHeartbeat
 
blockManagerId() - Method in class org.apache.spark.storage.BlockManagerMessages.GetPeers
 
blockManagerId() - Method in class org.apache.spark.storage.BlockManagerMessages.RegisterBlockManager
 
blockManagerId() - Method in class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
 
blockManagerId() - Method in class org.apache.spark.storage.BlockUpdatedInfo
 
blockManagerId() - Method in class org.apache.spark.storage.StorageStatus
 
blockManagerIdCache() - Static method in class org.apache.spark.storage.BlockManagerId
 
blockManagerIdFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
blockManagerIds() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
blockManagerIdToJson(BlockManagerId) - Static method in class org.apache.spark.util.JsonProtocol
 
BlockManagerMessages - Class in org.apache.spark.storage
 
BlockManagerMessages() - Constructor for class org.apache.spark.storage.BlockManagerMessages
 
BlockManagerMessages.BlockManagerHeartbeat - Class in org.apache.spark.storage
 
BlockManagerMessages.BlockManagerHeartbeat(BlockManagerId) - Constructor for class org.apache.spark.storage.BlockManagerMessages.BlockManagerHeartbeat
 
BlockManagerMessages.BlockManagerHeartbeat$ - Class in org.apache.spark.storage
 
BlockManagerMessages.BlockManagerHeartbeat$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.BlockManagerHeartbeat$
 
BlockManagerMessages.GetBlockStatus - Class in org.apache.spark.storage
 
BlockManagerMessages.GetBlockStatus(BlockId, boolean) - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetBlockStatus
 
BlockManagerMessages.GetBlockStatus$ - Class in org.apache.spark.storage
 
BlockManagerMessages.GetBlockStatus$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetBlockStatus$
 
BlockManagerMessages.GetExecutorEndpointRef - Class in org.apache.spark.storage
 
BlockManagerMessages.GetExecutorEndpointRef(String) - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetExecutorEndpointRef
 
BlockManagerMessages.GetExecutorEndpointRef$ - Class in org.apache.spark.storage
 
BlockManagerMessages.GetExecutorEndpointRef$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetExecutorEndpointRef$
 
BlockManagerMessages.GetLocations - Class in org.apache.spark.storage
 
BlockManagerMessages.GetLocations(BlockId) - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetLocations
 
BlockManagerMessages.GetLocations$ - Class in org.apache.spark.storage
 
BlockManagerMessages.GetLocations$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetLocations$
 
BlockManagerMessages.GetLocationsMultipleBlockIds - Class in org.apache.spark.storage
 
BlockManagerMessages.GetLocationsMultipleBlockIds(BlockId[]) - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetLocationsMultipleBlockIds
 
BlockManagerMessages.GetLocationsMultipleBlockIds$ - Class in org.apache.spark.storage
 
BlockManagerMessages.GetLocationsMultipleBlockIds$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetLocationsMultipleBlockIds$
 
BlockManagerMessages.GetMatchingBlockIds - Class in org.apache.spark.storage
 
BlockManagerMessages.GetMatchingBlockIds(Function1<BlockId, Object>, boolean) - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetMatchingBlockIds
 
BlockManagerMessages.GetMatchingBlockIds$ - Class in org.apache.spark.storage
 
BlockManagerMessages.GetMatchingBlockIds$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetMatchingBlockIds$
 
BlockManagerMessages.GetMemoryStatus$ - Class in org.apache.spark.storage
 
BlockManagerMessages.GetMemoryStatus$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetMemoryStatus$
 
BlockManagerMessages.GetPeers - Class in org.apache.spark.storage
 
BlockManagerMessages.GetPeers(BlockManagerId) - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetPeers
 
BlockManagerMessages.GetPeers$ - Class in org.apache.spark.storage
 
BlockManagerMessages.GetPeers$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetPeers$
 
BlockManagerMessages.GetStorageStatus$ - Class in org.apache.spark.storage
 
BlockManagerMessages.GetStorageStatus$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.GetStorageStatus$
 
BlockManagerMessages.HasCachedBlocks - Class in org.apache.spark.storage
 
BlockManagerMessages.HasCachedBlocks(String) - Constructor for class org.apache.spark.storage.BlockManagerMessages.HasCachedBlocks
 
BlockManagerMessages.HasCachedBlocks$ - Class in org.apache.spark.storage
 
BlockManagerMessages.HasCachedBlocks$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.HasCachedBlocks$
 
BlockManagerMessages.RegisterBlockManager - Class in org.apache.spark.storage
 
BlockManagerMessages.RegisterBlockManager(BlockManagerId, long, org.apache.spark.rpc.RpcEndpointRef) - Constructor for class org.apache.spark.storage.BlockManagerMessages.RegisterBlockManager
 
BlockManagerMessages.RegisterBlockManager$ - Class in org.apache.spark.storage
 
BlockManagerMessages.RegisterBlockManager$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.RegisterBlockManager$
 
BlockManagerMessages.RemoveBlock - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveBlock(BlockId) - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveBlock
 
BlockManagerMessages.RemoveBlock$ - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveBlock$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveBlock$
 
BlockManagerMessages.RemoveBroadcast - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveBroadcast(long, boolean) - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveBroadcast
 
BlockManagerMessages.RemoveBroadcast$ - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveBroadcast$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveBroadcast$
 
BlockManagerMessages.RemoveExecutor - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveExecutor(String) - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveExecutor
 
BlockManagerMessages.RemoveExecutor$ - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveExecutor$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveExecutor$
 
BlockManagerMessages.RemoveRdd - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveRdd(int) - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveRdd
 
BlockManagerMessages.RemoveRdd$ - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveRdd$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveRdd$
 
BlockManagerMessages.RemoveShuffle - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveShuffle(int) - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveShuffle
 
BlockManagerMessages.RemoveShuffle$ - Class in org.apache.spark.storage
 
BlockManagerMessages.RemoveShuffle$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.RemoveShuffle$
 
BlockManagerMessages.StopBlockManagerMaster$ - Class in org.apache.spark.storage
 
BlockManagerMessages.StopBlockManagerMaster$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.StopBlockManagerMaster$
 
BlockManagerMessages.ToBlockManagerMaster - Interface in org.apache.spark.storage
 
BlockManagerMessages.ToBlockManagerSlave - Interface in org.apache.spark.storage
 
BlockManagerMessages.TriggerThreadDump$ - Class in org.apache.spark.storage
Driver -> Executor message to trigger a thread dump.
BlockManagerMessages.TriggerThreadDump$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.TriggerThreadDump$
 
BlockManagerMessages.UpdateBlockInfo - Class in org.apache.spark.storage
 
BlockManagerMessages.UpdateBlockInfo(BlockManagerId, BlockId, StorageLevel, long, long) - Constructor for class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
 
BlockManagerMessages.UpdateBlockInfo() - Constructor for class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
 
BlockManagerMessages.UpdateBlockInfo$ - Class in org.apache.spark.storage
 
BlockManagerMessages.UpdateBlockInfo$() - Constructor for class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo$
 
blockManagerRemovedFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
blockManagerRemovedToJson(SparkListenerBlockManagerRemoved) - Static method in class org.apache.spark.util.JsonProtocol
 
BlockMatrix - Class in org.apache.spark.mllib.linalg.distributed
Represents a distributed matrix in blocks of local matrices.
BlockMatrix(RDD<Tuple2<Tuple2<Object, Object>, Matrix>>, int, int, long, long) - Constructor for class org.apache.spark.mllib.linalg.distributed.BlockMatrix
 
BlockMatrix(RDD<Tuple2<Tuple2<Object, Object>, Matrix>>, int, int) - Constructor for class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Alternate constructor for BlockMatrix that does not require the number of rows and columns as input.
blockName() - Method in class org.apache.spark.status.api.v1.RDDPartitionInfo
 
BlockNotFoundException - Exception in org.apache.spark.storage
 
BlockNotFoundException(String) - Constructor for exception org.apache.spark.storage.BlockNotFoundException
 
blocks() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
 
blocks() - Method in class org.apache.spark.storage.StorageStatus
Return the blocks stored in this block manager.
blockSize() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
BlockStatus - Class in org.apache.spark.storage
 
BlockStatus(StorageLevel, long, long) - Constructor for class org.apache.spark.storage.BlockStatus
 
blockStatusFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
blockStatusToJson(BlockStatus) - Static method in class org.apache.spark.util.JsonProtocol
 
blockUpdatedInfo() - Method in class org.apache.spark.scheduler.SparkListenerBlockUpdated
 
BlockUpdatedInfo - Class in org.apache.spark.storage
:: DeveloperApi :: Stores information about a block status in a block manager.
BlockUpdatedInfo(BlockManagerId, BlockId, StorageLevel, long, long) - Constructor for class org.apache.spark.storage.BlockUpdatedInfo
 
bloomFilter(String, long, double) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Builds a Bloom filter over a specified column.
bloomFilter(Column, long, double) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Builds a Bloom filter over a specified column.
bloomFilter(String, long, long) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Builds a Bloom filter over a specified column.
bloomFilter(Column, long, long) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Builds a Bloom filter over a specified column.
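A sketch of building and probing a filter (the column name and sizing below are made up):

    // Expect roughly one million distinct user ids with a 3% false-positive probability.
    val bf = df.stat.bloomFilter("user_id", 1000000L, 0.03)
    val maybeSeen = bf.mightContain("user-42")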
BloomFilter - Class in org.apache.spark.util.sketch
A Bloom filter is a space-efficient probabilistic data structure that offers an approximate containment test with one-sided error: if it claims that an item is contained in it, this might be in error, but if it claims that an item is not contained in it, then this is definitely true.
BloomFilter() - Constructor for class org.apache.spark.util.sketch.BloomFilter
 
BloomFilter.Version - Enum in org.apache.spark.util.sketch
 
bmAddress() - Method in class org.apache.spark.FetchFailed
 
BOOLEAN() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable boolean type.
BooleanParam - Class in org.apache.spark.ml.param
:: DeveloperApi :: Specialized version of Param[Boolean] for Java.
BooleanParam(String, String, String) - Constructor for class org.apache.spark.ml.param.BooleanParam
 
BooleanParam(Identifiable, String, String) - Constructor for class org.apache.spark.ml.param.BooleanParam
 
BooleanType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing Boolean values.
BooleanType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the BooleanType object.
boost(RDD<LabeledPoint>, RDD<LabeledPoint>, BoostingStrategy, boolean, long) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
Internal method for performing regression using trees as base learners.
BoostingStrategy - Class in org.apache.spark.mllib.tree.configuration
Configuration options for GradientBoostedTrees.
BoostingStrategy(Strategy, Loss, int, double, double) - Constructor for class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
Both() - Static method in class org.apache.spark.graphx.EdgeDirection
Edges originating from *and* arriving at a vertex of interest.
boundaries() - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
Boundaries in increasing order for which predictions are known.
boundaries() - Method in class org.apache.spark.mllib.regression.IsotonicRegressionModel
 
BoundedDouble - Class in org.apache.spark.partial
A Double value with error bars and associated confidence.
BoundedDouble(double, double, double, double) - Constructor for class org.apache.spark.partial.BoundedDouble
 
BreezeUtil - Class in org.apache.spark.ml.ann
In-place DGEMM and DGEMV for Breeze
BreezeUtil() - Constructor for class org.apache.spark.ml.ann.BreezeUtil
 
broadcast(T) - Method in class org.apache.spark.api.java.JavaSparkContext
Broadcast a read-only variable to the cluster, returning a Broadcast object for reading it in distributed functions.
Broadcast<T> - Class in org.apache.spark.broadcast
A broadcast variable.
Broadcast(long, ClassTag<T>) - Constructor for class org.apache.spark.broadcast.Broadcast
 
broadcast(T, ClassTag<T>) - Method in class org.apache.spark.SparkContext
Broadcast a read-only variable to the cluster, returning a Broadcast object for reading it in distributed functions.
broadcast(Dataset<T>) - Static method in class org.apache.spark.sql.functions
Marks a DataFrame as small enough for use in broadcast joins.
BROADCAST() - Static method in class org.apache.spark.storage.BlockId
 
BroadcastBlockId - Class in org.apache.spark.storage
 
BroadcastBlockId(long, String) - Constructor for class org.apache.spark.storage.BroadcastBlockId
 
broadcastId() - Method in class org.apache.spark.CleanBroadcast
 
broadcastId() - Method in class org.apache.spark.storage.BlockManagerMessages.RemoveBroadcast
 
broadcastId() - Method in class org.apache.spark.storage.BroadcastBlockId
 
broadcastManager() - Method in class org.apache.spark.SparkEnv
 
Broker - Class in org.apache.spark.streaming.kafka
Represents the host and port info for a Kafka broker.
bround(Column) - Static method in class org.apache.spark.sql.functions
Returns the value of the column e rounded to 0 decimal places with HALF_EVEN round mode.
bround(Column, int) - Static method in class org.apache.spark.sql.functions
Round the value of e to scale decimal places with HALF_EVEN round mode if scale >= 0, or at the integral part when scale < 0.
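A tiny sketch of the HALF_EVEN behaviour, using literal values (spark is the active SparkSession):

    import org.apache.spark.sql.functions.{bround, lit}

    // Banker's rounding: 2.5 rounds down to 2.0, 3.5 rounds up to 4.0.
    val rounded = spark.range(1).select(bround(lit(2.5)), bround(lit(3.5)))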
bucketBy(int, String, String...) - Method in class org.apache.spark.sql.DataFrameWriter
Buckets the output by the given columns.
bucketBy(int, String, Seq<String>) - Method in class org.apache.spark.sql.DataFrameWriter
Buckets the output by the given columns.
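A sketch of bucketed output (the table and column names are illustrative); note that bucketing takes effect when saving as a table:

    // Hash-partition the rows of df into 8 buckets by user_id and persist as a table.
    df.write
      .bucketBy(8, "user_id")
      .sortBy("user_id")
      .saveAsTable("events_bucketed")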
Bucketizer - Class in org.apache.spark.ml.feature
:: Experimental :: Bucketizer maps a column of continuous features to a column of feature buckets.
Bucketizer(String) - Constructor for class org.apache.spark.ml.feature.Bucketizer
 
Bucketizer() - Constructor for class org.apache.spark.ml.feature.Bucketizer
 
buffer() - Method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
bufferEncoder() - Method in class org.apache.spark.sql.expressions.Aggregator
Specifies the Encoder for the intermediate value type.
BufferReleasingInputStream - Class in org.apache.spark.storage
Helper class that ensures a ManagedBuffer is released upon InputStream.close()
BufferReleasingInputStream(InputStream, ShuffleBlockFetcherIterator) - Constructor for class org.apache.spark.storage.BufferReleasingInputStream
 
bufferSchema() - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
A StructType representing the data types of values in the aggregation buffer.
build(Node, int) - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData$
Create DecisionTreeModelReadWrite.NodeData instances for this node and all children.
build(DecisionTreeModel, int) - Method in class org.apache.spark.ml.tree.EnsembleModelReadWrite.EnsembleNodeData$
Create EnsembleModelReadWrite.EnsembleNodeData instances for the given tree.
build() - Method in class org.apache.spark.ml.tuning.ParamGridBuilder
Builds and returns all combinations of parameters specified by the param grid.
build() - Method in class org.apache.spark.sql.types.MetadataBuilder
Builds the Metadata instance.
builder() - Static method in class org.apache.spark.sql.SparkSession
Creates a SparkSession.Builder for constructing a SparkSession.
buildFormattedString(DataType, String, StringBuilder) - Static method in class org.apache.spark.sql.types.DataType
 
buildReader(SparkSession, StructType, StructType, StructType, Seq<Filter>, Map<String, String>, Configuration) - Method in class org.apache.spark.ml.source.libsvm.DefaultSource
 
buildScan(Seq<Attribute>, Seq<Expression>) - Method in interface org.apache.spark.sql.sources.CatalystScan
 
buildScan(String[], Filter[]) - Method in interface org.apache.spark.sql.sources.PrunedFilteredScan
 
buildScan(String[]) - Method in interface org.apache.spark.sql.sources.PrunedScan
 
buildScan() - Method in interface org.apache.spark.sql.sources.TableScan
 
buildTreeFromNodes(DecisionTreeModelReadWrite.NodeData[], String) - Static method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite
Given all data for all nodes in a tree, rebuild the tree.
BYTE() - Static method in class org.apache.spark.api.r.SerializationFormats
 
BYTE() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable byte type.
BytecodeUtils - Class in org.apache.spark.graphx.util
Includes a utility function to test whether a function accesses a specific attribute of an object.
BytecodeUtils() - Constructor for class org.apache.spark.graphx.util.BytecodeUtils
 
byteFromString(String, ByteUnit) - Static method in class org.apache.spark.internal.config.ConfigHelpers
 
BYTES_READ() - Method in class org.apache.spark.InternalAccumulator.input$
 
BYTES_WRITTEN() - Method in class org.apache.spark.InternalAccumulator.output$
 
BYTES_WRITTEN() - Method in class org.apache.spark.InternalAccumulator.shuffleWrite$
 
bytesRead() - Method in class org.apache.spark.status.api.v1.InputMetricDistributions
 
bytesRead() - Method in class org.apache.spark.status.api.v1.InputMetrics
 
bytesToString(long) - Static method in class org.apache.spark.util.Utils
Convert a quantity in bytes to a human-readable string such as "4.0 MB".
byteStringAsBytes(String) - Static method in class org.apache.spark.util.Utils
Convert a passed byte string (e.g. 50b, 100k, or 250m) to bytes.
byteStringAsGb(String) - Static method in class org.apache.spark.util.Utils
Convert a passed byte string (e.g. 50b, 100k, or 250m) to gibibytes.
byteStringAsKb(String) - Static method in class org.apache.spark.util.Utils
Convert a passed byte string (e.g. 50b, 100k, or 250m) to kibibytes.
byteStringAsMb(String) - Static method in class org.apache.spark.util.Utils
Convert a passed byte string (e.g. 50b, 100k, or 250m) to mebibytes.
bytesWritten() - Method in class org.apache.spark.status.api.v1.OutputMetricDistributions
 
bytesWritten() - Method in class org.apache.spark.status.api.v1.OutputMetrics
 
bytesWritten() - Method in class org.apache.spark.status.api.v1.ShuffleWriteMetrics
 
byteToString(long, ByteUnit) - Static method in class org.apache.spark.internal.config.ConfigHelpers
 
ByteType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing Byte values.
ByteType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the ByteType object.

C

cache() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Persist this RDD with the default storage level (`MEMORY_ONLY`).
cache() - Method in class org.apache.spark.api.java.JavaPairRDD
Persist this RDD with the default storage level (`MEMORY_ONLY`).
cache() - Method in class org.apache.spark.api.java.JavaRDD
Persist this RDD with the default storage level (`MEMORY_ONLY`).
cache() - Static method in class org.apache.spark.api.r.RRDD
 
cache() - Static method in class org.apache.spark.graphx.EdgeRDD
 
cache() - Method in class org.apache.spark.graphx.Graph
Caches the vertices and edges associated with this graph at the previously-specified target storage levels, which default to MEMORY_ONLY.
cache() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
Persists the edge partitions using `targetStorageLevel`, which defaults to MEMORY_ONLY.
cache() - Method in class org.apache.spark.graphx.impl.GraphImpl
 
cache() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
Persists the vertex partitions at `targetStorageLevel`, which defaults to MEMORY_ONLY.
cache() - Static method in class org.apache.spark.graphx.VertexRDD
 
cache() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Caches the underlying RDD.
cache() - Static method in class org.apache.spark.rdd.HadoopRDD
 
cache() - Static method in class org.apache.spark.rdd.JdbcRDD
 
cache() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
cache() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
cache() - Method in class org.apache.spark.rdd.RDD
Persist this RDD with the default storage level (`MEMORY_ONLY`).
cache() - Method in class org.apache.spark.sql.Dataset
Persist this Dataset with the default storage level (MEMORY_AND_DISK).
cache() - Method in class org.apache.spark.streaming.api.java.JavaDStream
Persist RDDs of this DStream with the default storage level (MEMORY_ONLY_SER)
cache() - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
cache() - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Persist RDDs of this DStream with the default storage level (MEMORY_ONLY_SER)
cache() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
cache() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
cache() - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
cache() - Method in class org.apache.spark.streaming.dstream.DStream
Persist RDDs of this DStream with the default storage level (MEMORY_ONLY_SER)
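As a reminder of the defaults listed above (rdd, ds, and stream are assumed to exist), cache() is simply persist() at each API's default storage level:

    val cachedRdd = rdd.cache()   // RDDs default to MEMORY_ONLY
    val cachedDs  = ds.cache()    // Datasets default to MEMORY_AND_DISK
    stream.cache()                // DStreams default to MEMORY_ONLY_SER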
cacheManager() - Method in class org.apache.spark.sql.SparkSession
 
cacheManager() - Method in class org.apache.spark.sql.SQLContext
 
cacheNodeIds() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
cacheNodeIds() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
cacheNodeIds() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
cacheNodeIds() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
cacheNodeIds() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
cacheNodeIds() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
cacheNodeIds() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
cacheNodeIds() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
cacheNodeIds() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
cacheNodeIds() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
cacheNodeIds() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
cacheNodeIds() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
cacheSize() - Method in interface org.apache.spark.SparkExecutorInfo
 
cacheSize() - Method in class org.apache.spark.SparkExecutorInfoImpl
 
cacheSize() - Method in class org.apache.spark.storage.StorageStatus
Return the memory used by caching RDDs
cacheTable(String) - Method in class org.apache.spark.sql.catalog.Catalog
Caches the specified table in-memory.
cacheTable(String) - Method in class org.apache.spark.sql.internal.CatalogImpl
Caches the specified table in-memory.
cacheTable(String) - Method in class org.apache.spark.sql.SQLContext
Caches the specified table in-memory.
calculate(DenseVector<Object>) - Method in class org.apache.spark.ml.classification.LogisticCostFun
 
calculate(DenseVector<Object>) - Method in class org.apache.spark.ml.regression.AFTCostFun
 
calculate(DenseVector<Object>) - Method in class org.apache.spark.ml.regression.LeastSquaresCostFun
 
calculate(double[], double) - Static method in class org.apache.spark.mllib.tree.impurity.Entropy
:: DeveloperApi :: information calculation for multiclass classification
calculate(double, double, double) - Static method in class org.apache.spark.mllib.tree.impurity.Entropy
:: DeveloperApi :: variance calculation
calculate(double[], double) - Static method in class org.apache.spark.mllib.tree.impurity.Gini
:: DeveloperApi :: information calculation for multiclass classification
calculate(double, double, double) - Static method in class org.apache.spark.mllib.tree.impurity.Gini
:: DeveloperApi :: variance calculation
calculate(double[], double) - Method in interface org.apache.spark.mllib.tree.impurity.Impurity
:: DeveloperApi :: information calculation for multiclass classification
calculate(double, double, double) - Method in interface org.apache.spark.mllib.tree.impurity.Impurity
:: DeveloperApi :: information calculation for regression
calculate(double[], double) - Static method in class org.apache.spark.mllib.tree.impurity.Variance
:: DeveloperApi :: information calculation for multiclass classification
calculate(double, double, double) - Static method in class org.apache.spark.mllib.tree.impurity.Variance
:: DeveloperApi :: variance calculation
CalendarIntervalType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing calendar time intervals.
CalendarIntervalType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the CalendarIntervalType object.
call(K, Iterator<V1>, Iterator<V2>) - Method in interface org.apache.spark.api.java.function.CoGroupFunction
 
call(T) - Method in interface org.apache.spark.api.java.function.DoubleFlatMapFunction
 
call(T) - Method in interface org.apache.spark.api.java.function.DoubleFunction
 
call(T) - Method in interface org.apache.spark.api.java.function.FilterFunction
 
call(T) - Method in interface org.apache.spark.api.java.function.FlatMapFunction
 
call(T1, T2) - Method in interface org.apache.spark.api.java.function.FlatMapFunction2
 
call(K, Iterator<V>) - Method in interface org.apache.spark.api.java.function.FlatMapGroupsFunction
 
call(T) - Method in interface org.apache.spark.api.java.function.ForeachFunction
 
call(Iterator<T>) - Method in interface org.apache.spark.api.java.function.ForeachPartitionFunction
 
call(T1) - Method in interface org.apache.spark.api.java.function.Function
 
call() - Method in interface org.apache.spark.api.java.function.Function0
 
call(T1, T2) - Method in interface org.apache.spark.api.java.function.Function2
 
call(T1, T2, T3) - Method in interface org.apache.spark.api.java.function.Function3
 
call(T1, T2, T3, T4) - Method in interface org.apache.spark.api.java.function.Function4
 
call(T) - Method in interface org.apache.spark.api.java.function.MapFunction
 
call(K, Iterator<V>) - Method in interface org.apache.spark.api.java.function.MapGroupsFunction
 
call(Iterator<T>) - Method in interface org.apache.spark.api.java.function.MapPartitionsFunction
 
call(T) - Method in interface org.apache.spark.api.java.function.PairFlatMapFunction
 
call(T) - Method in interface org.apache.spark.api.java.function.PairFunction
 
call(T, T) - Method in interface org.apache.spark.api.java.function.ReduceFunction
 
call(T) - Method in interface org.apache.spark.api.java.function.VoidFunction
 
call(T1, T2) - Method in interface org.apache.spark.api.java.function.VoidFunction2
 
call(T1) - Method in interface org.apache.spark.sql.api.java.UDF1
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10) - Method in interface org.apache.spark.sql.api.java.UDF10
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11) - Method in interface org.apache.spark.sql.api.java.UDF11
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12) - Method in interface org.apache.spark.sql.api.java.UDF12
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13) - Method in interface org.apache.spark.sql.api.java.UDF13
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14) - Method in interface org.apache.spark.sql.api.java.UDF14
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15) - Method in interface org.apache.spark.sql.api.java.UDF15
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16) - Method in interface org.apache.spark.sql.api.java.UDF16
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17) - Method in interface org.apache.spark.sql.api.java.UDF17
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18) - Method in interface org.apache.spark.sql.api.java.UDF18
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19) - Method in interface org.apache.spark.sql.api.java.UDF19
 
call(T1, T2) - Method in interface org.apache.spark.sql.api.java.UDF2
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20) - Method in interface org.apache.spark.sql.api.java.UDF20
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21) - Method in interface org.apache.spark.sql.api.java.UDF21
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22) - Method in interface org.apache.spark.sql.api.java.UDF22
 
call(T1, T2, T3) - Method in interface org.apache.spark.sql.api.java.UDF3
 
call(T1, T2, T3, T4) - Method in interface org.apache.spark.sql.api.java.UDF4
 
call(T1, T2, T3, T4, T5) - Method in interface org.apache.spark.sql.api.java.UDF5
 
call(T1, T2, T3, T4, T5, T6) - Method in interface org.apache.spark.sql.api.java.UDF6
 
call(T1, T2, T3, T4, T5, T6, T7) - Method in interface org.apache.spark.sql.api.java.UDF7
 
call(T1, T2, T3, T4, T5, T6, T7, T8) - Method in interface org.apache.spark.sql.api.java.UDF8
 
call(T1, T2, T3, T4, T5, T6, T7, T8, T9) - Method in interface org.apache.spark.sql.api.java.UDF9
 
callSite() - Method in class org.apache.spark.storage.RDDInfo
 
callUDF(String, Column...) - Static method in class org.apache.spark.sql.functions
Call a user-defined function.
callUDF(String, Seq<Column>) - Static method in class org.apache.spark.sql.functions
Call a user-defined function.
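A minimal sketch of calling a registered UDF by name through functions.callUDF, assuming a spark-shell session; the UDF name and column names are illustrative:

    import org.apache.spark.sql.functions.callUDF
    // Register a trivial UDF, then invoke it by name on a column.
    spark.udf.register("plusOne", (x: Long) => x + 1)
    val df = spark.range(5).toDF("id")
    df.select(callUDF("plusOne", df("id")).as("id_plus_one")).show()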
cancel() - Method in class org.apache.spark.ComplexFutureAction
 
cancel() - Method in interface org.apache.spark.FutureAction
Cancels the execution of this action.
cancel() - Method in class org.apache.spark.SimpleFutureAction
 
cancelAllJobs() - Method in class org.apache.spark.api.java.JavaSparkContext
Cancel all jobs that have been scheduled or are running.
cancelAllJobs() - Method in class org.apache.spark.SparkContext
Cancel all jobs that have been scheduled or are running.
cancelJobGroup(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Cancel active jobs for the specified group.
cancelJobGroup(String) - Method in class org.apache.spark.SparkContext
Cancel active jobs for the specified group.
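A hedged sketch of cancelling work by job group, assuming `sc` is an existing SparkContext; the group id and description are arbitrary:

    // Jobs submitted from this thread after setJobGroup belong to the group.
    sc.setJobGroup("nightly-report", "long-running aggregation", interruptOnCancel = true)
    // ... submit actions here ...
    sc.cancelJobGroup("nightly-report")   // cancel only that group's active jobs
    sc.cancelAllJobs()                    // or cancel everything scheduled or running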
canEqual(Object) - Static method in class org.apache.spark.Aggregator
 
canEqual(Object) - Static method in class org.apache.spark.CleanAccum
 
canEqual(Object) - Static method in class org.apache.spark.CleanBroadcast
 
canEqual(Object) - Static method in class org.apache.spark.CleanCheckpoint
 
canEqual(Object) - Static method in class org.apache.spark.CleanRDD
 
canEqual(Object) - Static method in class org.apache.spark.CleanShuffle
 
canEqual(Object) - Static method in class org.apache.spark.ExceptionFailure
 
canEqual(Object) - Static method in class org.apache.spark.ExecutorLostFailure
 
canEqual(Object) - Static method in class org.apache.spark.ExecutorRegistered
 
canEqual(Object) - Static method in class org.apache.spark.ExecutorRemoved
 
canEqual(Object) - Static method in class org.apache.spark.ExpireDeadHosts
 
canEqual(Object) - Static method in class org.apache.spark.FetchFailed
 
canEqual(Object) - Static method in class org.apache.spark.graphx.Edge
 
canEqual(Object) - Static method in class org.apache.spark.ml.feature.Dot
 
canEqual(Object) - Static method in class org.apache.spark.ml.feature.LabeledPoint
 
canEqual(Object) - Static method in class org.apache.spark.ml.param.ParamPair
 
canEqual(Object) - Static method in class org.apache.spark.mllib.feature.VocabWord
 
canEqual(Object) - Static method in class org.apache.spark.mllib.linalg.distributed.IndexedRow
 
canEqual(Object) - Static method in class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
canEqual(Object) - Static method in class org.apache.spark.mllib.linalg.QRDecomposition
 
canEqual(Object) - Static method in class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
canEqual(Object) - Static method in class org.apache.spark.mllib.recommendation.Rating
 
canEqual(Object) - Static method in class org.apache.spark.mllib.regression.LabeledPoint
 
canEqual(Object) - Static method in class org.apache.spark.mllib.stat.test.BinarySample
 
canEqual(Object) - Static method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
canEqual(Object) - Static method in class org.apache.spark.mllib.tree.model.Split
 
canEqual(Object) - Static method in class org.apache.spark.Resubmitted
 
canEqual(Object) - Static method in class org.apache.spark.rpc.netty.OnStart
 
canEqual(Object) - Static method in class org.apache.spark.rpc.netty.OnStop
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.AccumulableInfo
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.AllJobsCancelled
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
canEqual(Object) - Method in class org.apache.spark.scheduler.cluster.ExecutorInfo
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.JobSucceeded
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.local.KillTask
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.local.ReviveOffers
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.local.StatusUpdate
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.local.StopExecutor
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.ResubmitFailedStages
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.RuntimePercentage
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerApplicationEnd
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerBlockUpdated
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerJobStart
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerStageCompleted
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerTaskGettingResult
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.SparkListenerUnpersistRDD
 
canEqual(Object) - Static method in class org.apache.spark.scheduler.StopCoordinator
 
canEqual(Object) - Static method in class org.apache.spark.sql.DatasetHolder
 
canEqual(Object) - Static method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
canEqual(Object) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
canEqual(Object) - Static method in class org.apache.spark.sql.internal.HiveSerDe
 
canEqual(Object) - Static method in class org.apache.spark.sql.jdbc.JdbcType
 
canEqual(Object) - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
canEqual(Object) - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
canEqual(Object) - Static method in class org.apache.spark.sql.ProcessingTime
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.And
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.EqualNullSafe
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.EqualTo
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.GreaterThan
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.GreaterThanOrEqual
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.In
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.IsNotNull
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.IsNull
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.LessThan
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.LessThanOrEqual
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.Not
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.Or
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.StringContains
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.StringEndsWith
 
canEqual(Object) - Static method in class org.apache.spark.sql.sources.StringStartsWith
 
canEqual(Object) - Static method in class org.apache.spark.sql.types.ArrayType
 
canEqual(Object) - Static method in class org.apache.spark.sql.types.DecimalType
 
canEqual(Object) - Static method in class org.apache.spark.sql.types.MapType
 
canEqual(Object) - Static method in class org.apache.spark.sql.types.StructField
 
canEqual(Object) - Static method in class org.apache.spark.sql.types.StructType
 
canEqual(Object) - Static method in class org.apache.spark.StopMapOutputTracker
 
canEqual(Object) - Static method in class org.apache.spark.storage.BlockStatus
 
canEqual(Object) - Static method in class org.apache.spark.storage.BlockUpdatedInfo
 
canEqual(Object) - Static method in class org.apache.spark.storage.BroadcastBlockId
 
canEqual(Object) - Static method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
canEqual(Object) - Static method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
canEqual(Object) - Static method in class org.apache.spark.storage.RDDBlockId
 
canEqual(Object) - Static method in class org.apache.spark.storage.ShuffleBlockId
 
canEqual(Object) - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
canEqual(Object) - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
canEqual(Object) - Static method in class org.apache.spark.storage.StreamBlockId
 
canEqual(Object) - Static method in class org.apache.spark.storage.TaskResultBlockId
 
canEqual(Object) - Static method in class org.apache.spark.streaming.Duration
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.AllReceiverIds
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.BatchInfo
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.GetAllReceiverInfo
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StopAllReceivers
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchCompleted
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchStarted
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationCompleted
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationStarted
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverError
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStarted
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStopped
 
canEqual(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
canEqual(Object) - Static method in class org.apache.spark.streaming.Time
 
canEqual(Object) - Static method in class org.apache.spark.Success
 
canEqual(Object) - Static method in class org.apache.spark.TaskCommitDenied
 
canEqual(Object) - Static method in class org.apache.spark.TaskKilled
 
canEqual(Object) - Static method in class org.apache.spark.TaskResultLost
 
canEqual(Object) - Static method in class org.apache.spark.TaskSchedulerIsSet
 
canEqual(Object) - Static method in class org.apache.spark.UnknownReason
 
canEqual(Object) - Static method in class org.apache.spark.util.MethodIdentifier
 
canEqual(Object) - Method in class org.apache.spark.util.MutablePair
 
canHandle(String) - Method in class org.apache.spark.sql.jdbc.AggregatedDialect
 
canHandle(String) - Static method in class org.apache.spark.sql.jdbc.DB2Dialect
 
canHandle(String) - Static method in class org.apache.spark.sql.jdbc.DerbyDialect
 
canHandle(String) - Method in class org.apache.spark.sql.jdbc.JdbcDialect
Check if this dialect instance can handle a certain jdbc url.
canHandle(String) - Static method in class org.apache.spark.sql.jdbc.MsSqlServerDialect
 
canHandle(String) - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
canHandle(String) - Static method in class org.apache.spark.sql.jdbc.NoopDialect
 
canHandle(String) - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
canHandle(String) - Static method in class org.apache.spark.sql.jdbc.PostgresDialect
 
canonicalized() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
cartesian(JavaRDDLike<U, ?>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
cartesian(JavaRDDLike<U, ?>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
cartesian(JavaRDDLike<U, ?>) - Static method in class org.apache.spark.api.java.JavaRDD
 
cartesian(JavaRDDLike<U, ?>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return the Cartesian product of this RDD and another one, that is, the RDD of all pairs of elements (a, b) where a is in this and b is in other.
cartesian(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
cartesian(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
cartesian(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
cartesian(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
cartesian(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
cartesian(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
cartesian(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
cartesian(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
cartesian(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
cartesian(RDD<U>, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Return the Cartesian product of this RDD and another one, that is, the RDD of all pairs of elements (a, b) where a is in this and b is in other.
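For example, a small Cartesian product in a spark-shell session (`sc` predefined):

    val letters = sc.parallelize(Seq("a", "b"))
    val numbers = sc.parallelize(Seq(1, 2, 3))
    // All (letter, number) pairs; ordering across partitions is not guaranteed.
    letters.cartesian(numbers).collect()
    // e.g. Array((a,1), (a,2), (a,3), (b,1), (b,2), (b,3))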
caseSensitive() - Method in class org.apache.spark.ml.feature.StopWordsRemover
Whether to do a case-sensitive comparison over the stop words.
cast(DataType) - Method in class org.apache.spark.sql.Column
Casts the column to a different data type.
cast(String) - Method in class org.apache.spark.sql.Column
Casts the column to a different data type, using the canonical string representation of the type.
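A minimal sketch of both cast overloads, assuming a spark-shell session; the column names are made up:

    import org.apache.spark.sql.types.IntegerType
    import spark.implicits._
    val df = Seq(("1", "2.5")).toDF("a", "b")
    // Cast by DataType object and by its canonical string name.
    df.select(df("a").cast(IntegerType), df("b").cast("double")).printSchema()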
Catalog - Class in org.apache.spark.sql.catalog
Catalog interface for Spark.
Catalog() - Constructor for class org.apache.spark.sql.catalog.Catalog
 
catalog() - Method in class org.apache.spark.sql.SparkSession
Interface through which the user may create, drop, alter or query underlying databases, tables, functions etc.
CatalogImpl - Class in org.apache.spark.sql.internal
Internal implementation of the user-facing Catalog.
CatalogImpl(SparkSession) - Constructor for class org.apache.spark.sql.internal.CatalogImpl
 
catalogString() - Static method in class org.apache.spark.sql.types.ArrayType
 
catalogString() - Static method in class org.apache.spark.sql.types.BinaryType
 
catalogString() - Static method in class org.apache.spark.sql.types.BooleanType
 
catalogString() - Static method in class org.apache.spark.sql.types.ByteType
 
catalogString() - Static method in class org.apache.spark.sql.types.CalendarIntervalType
 
catalogString() - Method in class org.apache.spark.sql.types.DataType
String representation for the type saved in external catalogs.
catalogString() - Static method in class org.apache.spark.sql.types.DateType
 
catalogString() - Static method in class org.apache.spark.sql.types.DecimalType
 
catalogString() - Static method in class org.apache.spark.sql.types.DoubleType
 
catalogString() - Static method in class org.apache.spark.sql.types.FloatType
 
catalogString() - Static method in class org.apache.spark.sql.types.IntegerType
 
catalogString() - Static method in class org.apache.spark.sql.types.LongType
 
catalogString() - Static method in class org.apache.spark.sql.types.MapType
 
catalogString() - Static method in class org.apache.spark.sql.types.NullType
 
catalogString() - Static method in class org.apache.spark.sql.types.NumericType
 
catalogString() - Static method in class org.apache.spark.sql.types.ShortType
 
catalogString() - Static method in class org.apache.spark.sql.types.StringType
 
catalogString() - Static method in class org.apache.spark.sql.types.StructType
 
catalogString() - Static method in class org.apache.spark.sql.types.TimestampType
 
CatalystScan - Interface in org.apache.spark.sql.sources
:: Experimental :: An interface for experimenting with a more direct connection to the query planner.
Categorical() - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
categoricalFeaturesInfo() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
CategoricalSplit - Class in org.apache.spark.ml.tree
:: DeveloperApi :: Split which tests a categorical feature.
categories() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.SplitData
 
categories() - Method in class org.apache.spark.mllib.tree.model.Split
 
categoryMaps() - Method in class org.apache.spark.ml.feature.VectorIndexerModel
 
cause() - Method in exception org.apache.spark.sql.ContinuousQueryException
 
CausedBy - Class in org.apache.spark.util
Extractor Object for pulling out the root cause of an error.
CausedBy() - Constructor for class org.apache.spark.util.CausedBy
 
cbrt(Column) - Static method in class org.apache.spark.sql.functions
Computes the cube-root of the given value.
cbrt(String) - Static method in class org.apache.spark.sql.functions
Computes the cube-root of the given column.
ceil(Column) - Static method in class org.apache.spark.sql.functions
Computes the ceiling of the given value.
ceil(String) - Static method in class org.apache.spark.sql.functions
Computes the ceiling of the given column.
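A short sketch of the cbrt and ceil column functions, assuming a spark-shell session:

    import org.apache.spark.sql.functions.{cbrt, ceil}
    import spark.implicits._
    val df = Seq(8.0, 27.0, 2.3).toDF("x")
    // Cube root and ceiling of each value in column x.
    df.select(cbrt("x"), ceil("x")).show()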
ceil() - Method in class org.apache.spark.sql.types.Decimal
 
censorCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
censorCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
chainl1(Function0<Parsers.Parser<T>>, Function0<Parsers.Parser<Function2<T, T, T>>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
chainl1(Function0<Parsers.Parser<T>>, Function0<Parsers.Parser<U>>, Function0<Parsers.Parser<Function2<T, U, T>>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
chainr1(Function0<Parsers.Parser<T>>, Function0<Parsers.Parser<Function2<T, U, U>>>, Function2<T, U, U>, U) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
changePrecision(int, int) - Method in class org.apache.spark.sql.types.Decimal
Update precision and scale while keeping our value the same, and return true if successful.
changePrecision(int, int, int) - Method in class org.apache.spark.sql.types.Decimal
 
checkColumnType(StructType, String, DataType, String) - Static method in class org.apache.spark.ml.util.SchemaUtils
Check whether the given schema contains a column of the required data type.
checkColumnTypes(StructType, String, Seq<DataType>, String) - Static method in class org.apache.spark.ml.util.SchemaUtils
Check whether the given schema contains a column of one of the required data types.
checkErrors(Either<ArrayBuffer<Throwable>, T>) - Static method in class org.apache.spark.streaming.kafka.KafkaCluster
If the result is Right, return it; otherwise throw a SparkException.
checkFileExists(String, Configuration) - Static method in class org.apache.spark.streaming.util.HdfsUtils
Check if the file exists at the given path.
checkHost(String, String) - Static method in class org.apache.spark.util.Utils
 
checkHostPort(String, String) - Static method in class org.apache.spark.util.Utils
 
checkNumericType(StructType, String, String) - Static method in class org.apache.spark.ml.util.SchemaUtils
Check whether the given schema contains a column of the numeric data type.
checkpoint() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
checkpoint() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
checkpoint() - Static method in class org.apache.spark.api.java.JavaRDD
 
checkpoint() - Method in interface org.apache.spark.api.java.JavaRDDLike
Mark this RDD for checkpointing.
checkpoint() - Static method in class org.apache.spark.api.r.RRDD
 
checkpoint() - Static method in class org.apache.spark.graphx.EdgeRDD
 
checkpoint() - Method in class org.apache.spark.graphx.Graph
Mark this Graph for checkpointing.
checkpoint() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
checkpoint() - Method in class org.apache.spark.graphx.impl.GraphImpl
 
checkpoint() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
checkpoint() - Static method in class org.apache.spark.graphx.VertexRDD
 
checkpoint() - Method in class org.apache.spark.rdd.HadoopRDD
 
checkpoint() - Static method in class org.apache.spark.rdd.JdbcRDD
 
checkpoint() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
checkpoint() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
checkpoint() - Method in class org.apache.spark.rdd.RDD
Mark this RDD for checkpointing.
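A hedged sketch of RDD checkpointing, assuming `sc` is an existing SparkContext; the checkpoint directory is a placeholder path and must be set before checkpointing:

    sc.setCheckpointDir("/tmp/spark-checkpoints")   // placeholder directory
    val rdd = sc.parallelize(1 to 1000).map(_ * 2)
    rdd.checkpoint()                                // marks the RDD for checkpointing
    rdd.count()                                     // data is written on the first action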
checkpoint(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
checkpoint(Duration) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Enable periodic checkpointing of RDDs of this DStream.
checkpoint(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
checkpoint(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
checkpoint(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
checkpoint(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
checkpoint(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
checkpoint(String) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Sets the context to periodically checkpoint the DStream operations for driver fault-tolerance.
checkpoint(Duration) - Method in class org.apache.spark.streaming.dstream.DStream
Enable periodic checkpointing of RDDs of this DStream
checkpoint(String) - Method in class org.apache.spark.streaming.StreamingContext
Set the context to periodically checkpoint the DStream operations for driver fault-tolerance.
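A minimal streaming counterpart, assuming an existing StreamingContext `ssc` and DStream `stream`; the directory and interval are illustrative:

    import org.apache.spark.streaming.Seconds
    ssc.checkpoint("hdfs:///checkpoints/my-app")   // where metadata and data checkpoints go
    stream.checkpoint(Seconds(30))                 // checkpoint this DStream's RDDs every 30 seconds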
Checkpointed() - Static method in class org.apache.spark.rdd.CheckpointState
 
checkpointFile(String) - Method in class org.apache.spark.api.java.JavaSparkContext
 
checkpointFile(String, ClassTag<T>) - Method in class org.apache.spark.SparkContext
 
CheckpointingInProgress() - Static method in class org.apache.spark.rdd.CheckpointState
 
checkpointInterval() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
checkpointInterval() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
checkpointInterval() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
checkpointInterval() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
checkpointInterval() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
checkpointInterval() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
checkpointInterval() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
checkpointInterval() - Static method in class org.apache.spark.ml.clustering.LDA
 
checkpointInterval() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
checkpointInterval() - Static method in class org.apache.spark.ml.recommendation.ALS
 
checkpointInterval() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
checkpointInterval() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
checkpointInterval() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
checkpointInterval() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
checkpointInterval() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
checkpointInterval() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
checkpointInterval() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
CheckpointReader - Class in org.apache.spark.streaming
 
CheckpointReader() - Constructor for class org.apache.spark.streaming.CheckpointReader
 
CheckpointState - Class in org.apache.spark.rdd
Enumeration to manage state transitions of an RDD through checkpointing [ Initialized --> checkpointing in progress --> checkpointed ].
CheckpointState() - Constructor for class org.apache.spark.rdd.CheckpointState
 
checkState(boolean, Function0<String>) - Static method in class org.apache.spark.streaming.util.HdfsUtils
 
checkThresholdConsistency() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
checkThresholdConsistency() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
child() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
child() - Method in class org.apache.spark.sql.sources.Not
 
CHILD_CONNECTION_TIMEOUT - Static variable in class org.apache.spark.launcher.SparkLauncher
Maximum time (in ms) to wait for a child process to connect back to the launcher server when using start().
CHILD_PROCESS_LOGGER_NAME - Static variable in class org.apache.spark.launcher.SparkLauncher
Logger name to use when launching a child process.
children() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
chiSqFunc() - Method in class org.apache.spark.mllib.stat.test.ChiSqTest.Method
 
ChiSqSelector - Class in org.apache.spark.ml.feature
:: Experimental :: Chi-Squared feature selection, which selects categorical features to use for predicting a categorical label.
ChiSqSelector(String) - Constructor for class org.apache.spark.ml.feature.ChiSqSelector
 
ChiSqSelector() - Constructor for class org.apache.spark.ml.feature.ChiSqSelector
 
ChiSqSelector - Class in org.apache.spark.mllib.feature
Creates a ChiSquared feature selector.
ChiSqSelector(int) - Constructor for class org.apache.spark.mllib.feature.ChiSqSelector
 
ChiSqSelectorModel - Class in org.apache.spark.ml.feature
:: Experimental :: Model fitted by ChiSqSelector.
ChiSqSelectorModel - Class in org.apache.spark.mllib.feature
Chi Squared selector model.
ChiSqSelectorModel(int[]) - Constructor for class org.apache.spark.mllib.feature.ChiSqSelectorModel
 
ChiSqSelectorModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.feature
 
ChiSqSelectorModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.feature.ChiSqSelectorModel.SaveLoadV1_0$
 
ChiSqSelectorModel.SaveLoadV1_0$.Data - Class in org.apache.spark.mllib.feature
Model data for import/export
ChiSqSelectorModel.SaveLoadV1_0$.Data(int) - Constructor for class org.apache.spark.mllib.feature.ChiSqSelectorModel.SaveLoadV1_0$.Data
 
chiSqTest(Vector, Vector) - Static method in class org.apache.spark.mllib.stat.Statistics
Conduct Pearson's chi-squared goodness of fit test of the observed data against the expected distribution.
chiSqTest(Vector) - Static method in class org.apache.spark.mllib.stat.Statistics
Conduct Pearson's chi-squared goodness of fit test of the observed data against the uniform distribution, with each category having an expected frequency of 1 / observed.size.
chiSqTest(Matrix) - Static method in class org.apache.spark.mllib.stat.Statistics
Conduct Pearson's independence test on the input contingency matrix, which cannot contain negative entries or columns or rows that sum up to 0.
chiSqTest(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.stat.Statistics
Conduct Pearson's independence test for every feature against the label across the input RDD.
chiSqTest(JavaRDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.stat.Statistics
Java-friendly version of chiSqTest()
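For example, a goodness-of-fit test in a spark-shell session; the observed and expected counts below are made up:

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.stat.Statistics
    val observed = Vectors.dense(89, 97, 114)
    val expected = Vectors.dense(100, 100, 100)
    val result = Statistics.chiSqTest(observed, expected)
    println(result.pValue)                 // the result also carries the statistic and degrees of freedom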
ChiSqTest - Class in org.apache.spark.mllib.stat.test
Conduct the chi-squared test for the input RDDs using the specified method.
ChiSqTest() - Constructor for class org.apache.spark.mllib.stat.test.ChiSqTest
 
ChiSqTest.Method - Class in org.apache.spark.mllib.stat.test
param: name String name for the method.
ChiSqTest.Method(String, Function2<Object, Object, Object>) - Constructor for class org.apache.spark.mllib.stat.test.ChiSqTest.Method
 
ChiSqTest.Method$ - Class in org.apache.spark.mllib.stat.test
 
ChiSqTest.Method$() - Constructor for class org.apache.spark.mllib.stat.test.ChiSqTest.Method$
 
ChiSqTest.NullHypothesis$ - Class in org.apache.spark.mllib.stat.test
 
ChiSqTest.NullHypothesis$() - Constructor for class org.apache.spark.mllib.stat.test.ChiSqTest.NullHypothesis$
 
ChiSqTestResult - Class in org.apache.spark.mllib.stat.test
Object containing the test results for the chi-squared hypothesis test.
chiSquared(Vector, Vector, String) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
chiSquaredFeatures(RDD<LabeledPoint>, String) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
Conduct Pearson's independence test for each feature against the label across the input RDD.
chiSquaredMatrix(Matrix, String) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
chmod700(File) - Static method in class org.apache.spark.util.Utils
JDK equivalent of chmod 700 file.
CholeskyDecomposition - Class in org.apache.spark.mllib.linalg
Compute Cholesky decomposition.
CholeskyDecomposition() - Constructor for class org.apache.spark.mllib.linalg.CholeskyDecomposition
 
chunkedByteBuffer() - Method in class org.apache.spark.util.io.ChunkedByteBufferInputStream
 
ChunkedByteBufferInputStream - Class in org.apache.spark.util.io
Reads data from a ChunkedByteBuffer.
ChunkedByteBufferInputStream(ChunkedByteBuffer, boolean) - Constructor for class org.apache.spark.util.io.ChunkedByteBufferInputStream
 
classForName(String) - Static method in class org.apache.spark.util.Utils
Preferred alternative to Class.forName(className)
Classification() - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
ClassificationModel<FeaturesType,M extends ClassificationModel<FeaturesType,M>> - Class in org.apache.spark.ml.classification
:: DeveloperApi ::
ClassificationModel() - Constructor for class org.apache.spark.ml.classification.ClassificationModel
 
ClassificationModel - Interface in org.apache.spark.mllib.classification
Represents a classification model that predicts to which of a set of categories an example belongs.
Classifier<FeaturesType,E extends Classifier<FeaturesType,E,M>,M extends ClassificationModel<FeaturesType,M>> - Class in org.apache.spark.ml.classification
:: DeveloperApi ::
Classifier() - Constructor for class org.apache.spark.ml.classification.Classifier
 
classifier() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
classifier() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
classIsLoadable(String) - Static method in class org.apache.spark.util.Utils
Determines whether the provided class is loadable in the current thread.
className() - Method in class org.apache.spark.ExceptionFailure
 
className() - Method in class org.apache.spark.sql.catalog.Function
 
classpathEntries() - Method in class org.apache.spark.ui.env.EnvironmentListener
 
classTag() - Method in class org.apache.spark.api.java.JavaDoubleRDD
 
classTag() - Method in class org.apache.spark.api.java.JavaPairRDD
 
classTag() - Method in class org.apache.spark.api.java.JavaRDD
 
classTag() - Method in interface org.apache.spark.api.java.JavaRDDLike
 
classTag() - Method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
classTag() - Method in interface org.apache.spark.storage.memory.MemoryEntry
 
classTag() - Method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
classTag() - Method in class org.apache.spark.streaming.api.java.JavaDStream
 
classTag() - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
 
classTag() - Method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
classTag() - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
classTag() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
classTag() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
classTag() - Method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
clean(long, boolean) - Method in class org.apache.spark.streaming.util.WriteAheadLog
Clean all the records that are older than the threshold time.
clean(Object, boolean, boolean) - Static method in class org.apache.spark.util.ClosureCleaner
Clean the given closure in place.
CleanAccum - Class in org.apache.spark
 
CleanAccum(long) - Constructor for class org.apache.spark.CleanAccum
 
cleanArgs() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
CleanBroadcast - Class in org.apache.spark
 
CleanBroadcast(long) - Constructor for class org.apache.spark.CleanBroadcast
 
CleanCheckpoint - Class in org.apache.spark
 
CleanCheckpoint(int) - Constructor for class org.apache.spark.CleanCheckpoint
 
CleanRDD - Class in org.apache.spark
 
CleanRDD(int) - Constructor for class org.apache.spark.CleanRDD
 
CleanShuffle - Class in org.apache.spark
 
CleanShuffle(int) - Constructor for class org.apache.spark.CleanShuffle
 
CleanupTask - Interface in org.apache.spark
Classes that represent cleaning tasks.
CleanupTaskWeakReference - Class in org.apache.spark
A WeakReference associated with a CleanupTask.
CleanupTaskWeakReference(CleanupTask, Object, ReferenceQueue<Object>) - Constructor for class org.apache.spark.CleanupTaskWeakReference
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
clear(Param<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
clear(Param<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
clear(Param<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
clear(Param<?>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.clustering.LDA
 
clear(Param<?>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
clear(Param<?>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
clear(Param<?>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.DCT
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.IDF
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.Interaction
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.NGram
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.PCA
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.RFormula
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
clear(Param<?>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
clear(Param<?>) - Method in interface org.apache.spark.ml.param.Params
Clears the user-supplied value for the input param.
clear(Param<?>) - Static method in class org.apache.spark.ml.Pipeline
 
clear(Param<?>) - Static method in class org.apache.spark.ml.PipelineModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
clear(Param<?>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
clear(Param<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
clear(Param<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
clear(Param<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
clear(Param<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
clear() - Method in class org.apache.spark.sql.util.ExecutionListenerManager
Removes all the registered QueryExecutionListeners.
clear() - Static method in class org.apache.spark.util.AccumulatorContext
Clears all registered AccumulatorV2s.
clearActive() - Static method in class org.apache.spark.sql.SQLContext
Clears the active SQLContext for current thread.
clearCache() - Method in class org.apache.spark.sql.catalog.Catalog
Removes all cached tables from the in-memory cache.
clearCache() - Method in class org.apache.spark.sql.internal.CatalogImpl
Removes all cached tables from the in-memory cache.
clearCache() - Method in class org.apache.spark.sql.SQLContext
Removes all cached tables from the in-memory cache.
clearCallSite() - Method in class org.apache.spark.api.java.JavaSparkContext
Pass-through to SparkContext.clearCallSite.
clearCallSite() - Method in class org.apache.spark.SparkContext
Clear the thread-local property for overriding the call sites of actions and RDDs.
clearDependencies() - Static method in class org.apache.spark.api.r.RRDD
 
clearDependencies() - Static method in class org.apache.spark.graphx.EdgeRDD
 
clearDependencies() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
clearDependencies() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
clearDependencies() - Static method in class org.apache.spark.graphx.VertexRDD
 
clearDependencies() - Method in class org.apache.spark.rdd.CoGroupedRDD
 
clearDependencies() - Static method in class org.apache.spark.rdd.HadoopRDD
 
clearDependencies() - Static method in class org.apache.spark.rdd.JdbcRDD
 
clearDependencies() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
clearDependencies() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
clearDependencies() - Method in class org.apache.spark.rdd.RDD
Clears the dependencies of this RDD.
clearDependencies() - Method in class org.apache.spark.rdd.ShuffledRDD
 
clearDependencies() - Method in class org.apache.spark.rdd.UnionRDD
 
clearJobGroup() - Method in class org.apache.spark.api.java.JavaSparkContext
Clear the current thread's job group ID and its description.
clearJobGroup() - Method in class org.apache.spark.SparkContext
Clear the current thread's job group ID and its description.
clearThreshold() - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
Clears the threshold so that predict will output raw prediction scores.
clearThreshold() - Method in class org.apache.spark.mllib.classification.SVMModel
Clears the threshold so that predict will output raw prediction scores.
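A hedged sketch of switching from class labels to raw scores; `model` is assumed to be an already-trained SVMModel (or LogisticRegressionModel) and `testData` an RDD of feature vectors:

    model.clearThreshold()                       // predict now returns raw margins/scores
    val scores = testData.map(v => model.predict(v))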
clone() - Method in class org.apache.spark.SparkConf
Copy this object
clone() - Method in class org.apache.spark.sql.types.Decimal
 
clone() - Method in class org.apache.spark.storage.StorageLevel
 
clone() - Method in class org.apache.spark.util.random.BernoulliCellSampler
 
clone() - Method in class org.apache.spark.util.random.BernoulliSampler
 
clone() - Method in class org.apache.spark.util.random.PoissonSampler
 
clone() - Method in interface org.apache.spark.util.random.RandomSampler
Return a copy of the RandomSampler object.
clone(T, SerializerInstance, ClassTag<T>) - Static method in class org.apache.spark.util.Utils
Clone an object using a Spark serializer.
cloneComplement() - Method in class org.apache.spark.util.random.BernoulliCellSampler
Return a sampler that is the complement of the range specified for the current sampler.
close() - Method in class org.apache.hadoop.hive.ql.io.orc.SparkOrcNewRecordReader
 
close() - Method in class org.apache.spark.api.java.JavaSparkContext
 
close() - Method in class org.apache.spark.io.SnappyOutputStreamWrapper
 
close() - Method in class org.apache.spark.serializer.DeserializationStream
 
close() - Method in class org.apache.spark.serializer.SerializationStream
 
close() - Method in class org.apache.spark.storage.BufferReleasingInputStream
 
close() - Method in class org.apache.spark.storage.memory.RedirectableOutputStream
 
close() - Method in class org.apache.spark.storage.TimeTrackingOutputStream
 
close() - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
 
close() - Method in class org.apache.spark.streaming.util.WriteAheadLog
Close this log and release any resources.
close() - Method in class org.apache.spark.util.io.ChunkedByteBufferInputStream
 
ClosureCleaner - Class in org.apache.spark.util
A cleaner that renders closures serializable when it can be done safely.
ClosureCleaner() - Constructor for class org.apache.spark.util.ClosureCleaner
 
closureSerializer() - Method in class org.apache.spark.SparkEnv
 
cls() - Method in class org.apache.spark.util.MethodIdentifier
 
clsTag() - Method in interface org.apache.spark.sql.Encoder
A ClassTag that can be used to construct an Array to contain a collection of `T`.
cluster() - Method in class org.apache.spark.ml.clustering.GaussianMixtureSummary
Cluster centers of the transformed data.
cluster() - Method in class org.apache.spark.ml.clustering.KMeansSummary
Cluster centers of the transformed data.
cluster() - Method in class org.apache.spark.mllib.clustering.PowerIterationClustering.Assignment
 
clusterCenters() - Method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
clusterCenters() - Method in class org.apache.spark.ml.clustering.KMeansModel
 
clusterCenters() - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
Leaf cluster centers.
clusterCenters() - Method in class org.apache.spark.mllib.clustering.KMeansModel
 
clusterCenters() - Method in class org.apache.spark.mllib.clustering.StreamingKMeansModel
 
clusterSizes() - Method in class org.apache.spark.ml.clustering.GaussianMixtureSummary
Size of (number of data points in) each cluster.
clusterSizes() - Method in class org.apache.spark.ml.clustering.KMeansSummary
Size of (number of data points in) each cluster.
clusterWeights() - Method in class org.apache.spark.mllib.clustering.StreamingKMeansModel
 
cn() - Method in class org.apache.spark.mllib.feature.VocabWord
 
coalesce(int) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return a new RDD that is reduced into numPartitions partitions.
coalesce(int, boolean) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return a new RDD that is reduced into numPartitions partitions.
coalesce(int) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a new RDD that is reduced into numPartitions partitions.
coalesce(int, boolean) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a new RDD that is reduced into numPartitions partitions.
coalesce(int) - Method in class org.apache.spark.api.java.JavaRDD
Return a new RDD that is reduced into numPartitions partitions.
coalesce(int, boolean) - Method in class org.apache.spark.api.java.JavaRDD
Return a new RDD that is reduced into numPartitions partitions.
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
coalesce(int, RDD<?>) - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
Runs the packing algorithm and returns an array of PartitionGroups that are, where possible, load-balanced and grouped by locality.
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
coalesce(int, RDD<?>) - Method in interface org.apache.spark.rdd.PartitionCoalescer
Coalesce the partitions of the given RDD.
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
coalesce(int, boolean, Option<PartitionCoalescer>, Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Return a new RDD that is reduced into numPartitions partitions.
coalesce(int) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset that has exactly numPartitions partitions.
coalesce(Column...) - Static method in class org.apache.spark.sql.functions
Returns the first column that is not null, or null if all inputs are null.
coalesce(Seq<Column>) - Static method in class org.apache.spark.sql.functions
Returns the first column that is not null, or null if all inputs are null.
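As a rough illustration of the two coalesce flavours indexed above (Dataset.coalesce for reducing the partition count, and functions.coalesce for picking the first non-null value), here is a minimal Scala sketch; the local SparkSession, app name, and sample data are assumptions made for this example only.

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.{coalesce, lit}

    // Assumption: a local SparkSession created just for this sketch.
    val spark = SparkSession.builder().master("local[*]").appName("coalesce-sketch").getOrCreate()
    import spark.implicits._

    // Partition coalescing: shrink a Dataset to fewer partitions without a full shuffle.
    val ds = spark.range(0L, 1000L, 1L, 8)
    println(ds.coalesce(2).rdd.getNumPartitions)   // 2

    // SQL coalesce: per row, the first column that is not null.
    val df = Seq(("a", null), (null, "b")).toDF("x", "y")
    df.select(coalesce($"x", $"y", lit("none")).as("firstNonNull")).show()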
coalesce$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
coalesce$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
coalesce$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
coalesce$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
coalesce$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
coalesce$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
coalesce$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
coalesce$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
coalesce$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
coalesce$default$3() - Static method in class org.apache.spark.api.r.RRDD
 
coalesce$default$3() - Static method in class org.apache.spark.graphx.EdgeRDD
 
coalesce$default$3() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
coalesce$default$3() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
coalesce$default$3() - Static method in class org.apache.spark.graphx.VertexRDD
 
coalesce$default$3() - Static method in class org.apache.spark.rdd.HadoopRDD
 
coalesce$default$3() - Static method in class org.apache.spark.rdd.JdbcRDD
 
coalesce$default$3() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
coalesce$default$3() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
coalesce$default$4(int, boolean, Option<PartitionCoalescer>) - Static method in class org.apache.spark.api.r.RRDD
 
coalesce$default$4(int, boolean, Option<PartitionCoalescer>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
coalesce$default$4(int, boolean, Option<PartitionCoalescer>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
coalesce$default$4(int, boolean, Option<PartitionCoalescer>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
coalesce$default$4(int, boolean, Option<PartitionCoalescer>) - Static method in class org.apache.spark.graphx.VertexRDD
 
coalesce$default$4(int, boolean, Option<PartitionCoalescer>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
coalesce$default$4(int, boolean, Option<PartitionCoalescer>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
coalesce$default$4(int, boolean, Option<PartitionCoalescer>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
coalesce$default$4(int, boolean, Option<PartitionCoalescer>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
CoarseGrainedClusterMessages - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages
 
CoarseGrainedClusterMessages.AddWebUIFilter - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.AddWebUIFilter(String, Map<String, String>, String) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.AddWebUIFilter
 
CoarseGrainedClusterMessages.AddWebUIFilter$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.AddWebUIFilter$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.AddWebUIFilter$
 
CoarseGrainedClusterMessages.GetExecutorLossReason - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.GetExecutorLossReason(String) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.GetExecutorLossReason
 
CoarseGrainedClusterMessages.GetExecutorLossReason$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.GetExecutorLossReason$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.GetExecutorLossReason$
 
CoarseGrainedClusterMessages.KillExecutors - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.KillExecutors(Seq<String>) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillExecutors
 
CoarseGrainedClusterMessages.KillExecutors$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.KillExecutors$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillExecutors$
 
CoarseGrainedClusterMessages.KillTask - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.KillTask(long, String, boolean) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillTask
 
CoarseGrainedClusterMessages.KillTask$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.KillTask$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillTask$
 
CoarseGrainedClusterMessages.LaunchTask - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.LaunchTask(org.apache.spark.util.SerializableBuffer) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.LaunchTask
 
CoarseGrainedClusterMessages.LaunchTask$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.LaunchTask$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.LaunchTask$
 
CoarseGrainedClusterMessages.RegisterClusterManager - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RegisterClusterManager(org.apache.spark.rpc.RpcEndpointRef) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterClusterManager
 
CoarseGrainedClusterMessages.RegisterClusterManager$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RegisterClusterManager$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterClusterManager$
 
CoarseGrainedClusterMessages.RegisteredExecutor - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RegisteredExecutor(String) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisteredExecutor
 
CoarseGrainedClusterMessages.RegisteredExecutor$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RegisteredExecutor$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisteredExecutor$
 
CoarseGrainedClusterMessages.RegisterExecutor - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RegisterExecutor(String, org.apache.spark.rpc.RpcEndpointRef, int, Map<String, String>) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutor
 
CoarseGrainedClusterMessages.RegisterExecutor$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RegisterExecutor$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutor$
 
CoarseGrainedClusterMessages.RegisterExecutorFailed - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RegisterExecutorFailed(String) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutorFailed
 
CoarseGrainedClusterMessages.RegisterExecutorFailed$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RegisterExecutorFailed$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutorFailed$
 
CoarseGrainedClusterMessages.RegisterExecutorResponse - Interface in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RemoveExecutor - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RemoveExecutor(String, org.apache.spark.scheduler.ExecutorLossReason) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RemoveExecutor
 
CoarseGrainedClusterMessages.RemoveExecutor$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RemoveExecutor$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RemoveExecutor$
 
CoarseGrainedClusterMessages.RequestExecutors - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RequestExecutors(int, int, Map<String, Object>) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RequestExecutors
 
CoarseGrainedClusterMessages.RequestExecutors$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RequestExecutors$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RequestExecutors$
 
CoarseGrainedClusterMessages.RetrieveLastAllocatedExecutorId$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RetrieveLastAllocatedExecutorId$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RetrieveLastAllocatedExecutorId$
 
CoarseGrainedClusterMessages.RetrieveSparkProps$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.RetrieveSparkProps$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RetrieveSparkProps$
 
CoarseGrainedClusterMessages.ReviveOffers$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.ReviveOffers$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.ReviveOffers$
 
CoarseGrainedClusterMessages.SetupDriver - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.SetupDriver(org.apache.spark.rpc.RpcEndpointRef) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.SetupDriver
 
CoarseGrainedClusterMessages.SetupDriver$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.SetupDriver$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.SetupDriver$
 
CoarseGrainedClusterMessages.Shutdown$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.Shutdown$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.Shutdown$
 
CoarseGrainedClusterMessages.StatusUpdate - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.StatusUpdate(String, long, Enumeration.Value, org.apache.spark.util.SerializableBuffer) - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StatusUpdate
 
CoarseGrainedClusterMessages.StatusUpdate$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.StatusUpdate$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StatusUpdate$
 
CoarseGrainedClusterMessages.StopDriver$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.StopDriver$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StopDriver$
 
CoarseGrainedClusterMessages.StopExecutor$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.StopExecutor$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StopExecutor$
 
CoarseGrainedClusterMessages.StopExecutors$ - Class in org.apache.spark.scheduler.cluster
 
CoarseGrainedClusterMessages.StopExecutors$() - Constructor for class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StopExecutors$
 
code() - Method in class org.apache.spark.mllib.feature.VocabWord
 
codeLen() - Method in class org.apache.spark.mllib.feature.VocabWord
 
coefficients() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
coefficients() - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
coefficients() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
coefficients() - Method in class org.apache.spark.ml.regression.LinearRegressionModel
 
coefficientStandardErrors() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionTrainingSummary
Standard error of estimated coefficients and intercept.
coefficientStandardErrors() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Standard error of estimated coefficients and intercept.
cogroup(JavaPairRDD<K, W>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
For each key k in this or other, return a resulting RDD that contains a tuple with the list of values for that key in this as well as other.
cogroup(JavaPairRDD<K, W1>, JavaPairRDD<K, W2>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
For each key k in this or other1 or other2, return a resulting RDD that contains a tuple with the list of values for that key in this, other1 and other2.
cogroup(JavaPairRDD<K, W1>, JavaPairRDD<K, W2>, JavaPairRDD<K, W3>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
For each key k in this or other1 or other2 or other3, return a resulting RDD that contains a tuple with the list of values for that key in this, other1, other2 and other3.
cogroup(JavaPairRDD<K, W>) - Method in class org.apache.spark.api.java.JavaPairRDD
For each key k in this or other, return a resulting RDD that contains a tuple with the list of values for that key in this as well as other.
cogroup(JavaPairRDD<K, W1>, JavaPairRDD<K, W2>) - Method in class org.apache.spark.api.java.JavaPairRDD
For each key k in this or other1 or other2, return a resulting RDD that contains a tuple with the list of values for that key in this, other1 and other2.
cogroup(JavaPairRDD<K, W1>, JavaPairRDD<K, W2>, JavaPairRDD<K, W3>) - Method in class org.apache.spark.api.java.JavaPairRDD
For each key k in this or other1 or other2 or other3, return a resulting RDD that contains a tuple with the list of values for that key in this, other1, other2 and other3.
cogroup(JavaPairRDD<K, W>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
For each key k in this or other, return a resulting RDD that contains a tuple with the list of values for that key in this as well as other.
cogroup(JavaPairRDD<K, W1>, JavaPairRDD<K, W2>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
For each key k in this or other1 or other2, return a resulting RDD that contains a tuple with the list of values for that key in this, other1 and other2.
cogroup(JavaPairRDD<K, W1>, JavaPairRDD<K, W2>, JavaPairRDD<K, W3>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
For each key k in this or other1 or other2 or other3, return a resulting RDD that contains a tuple with the list of values for that key in this, other1, other2 and other3.
cogroup(RDD<Tuple2<K, W1>>, RDD<Tuple2<K, W2>>, RDD<Tuple2<K, W3>>, Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
For each key k in this or other1 or other2 or other3, return a resulting RDD that contains a tuple with the list of values for that key in this, other1, other2 and other3.
cogroup(RDD<Tuple2<K, W>>, Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
For each key k in this or other, return a resulting RDD that contains a tuple with the list of values for that key in this as well as other.
cogroup(RDD<Tuple2<K, W1>>, RDD<Tuple2<K, W2>>, Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
For each key k in this or other1 or other2, return a resulting RDD that contains a tuple with the list of values for that key in this, other1 and other2.
cogroup(RDD<Tuple2<K, W1>>, RDD<Tuple2<K, W2>>, RDD<Tuple2<K, W3>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
For each key k in this or other1 or other2 or other3, return a resulting RDD that contains a tuple with the list of values for that key in this, other1, other2 and other3.
cogroup(RDD<Tuple2<K, W>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
For each key k in this or other, return a resulting RDD that contains a tuple with the list of values for that key in this as well as other.
cogroup(RDD<Tuple2<K, W1>>, RDD<Tuple2<K, W2>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
For each key k in this or other1 or other2, return a resulting RDD that contains a tuple with the list of values for that key in this, other1 and other2.
cogroup(RDD<Tuple2<K, W>>, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
For each key k in this or other, return a resulting RDD that contains a tuple with the list of values for that key in this as well as other.
cogroup(RDD<Tuple2<K, W1>>, RDD<Tuple2<K, W2>>, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
For each key k in this or other1 or other2, return a resulting RDD that contains a tuple with the list of values for that key in this, other1 and other2.
cogroup(RDD<Tuple2<K, W1>>, RDD<Tuple2<K, W2>>, RDD<Tuple2<K, W3>>, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
For each key k in this or other1 or other2 or other3, return a resulting RDD that contains a tuple with the list of values for that key in this, other1, other2 and other3.
cogroup(KeyValueGroupedDataset<K, U>, Function3<K, Iterator<V>, Iterator<U>, TraversableOnce<R>>, Encoder<R>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Applies the given function to each group of cogrouped data.
cogroup(KeyValueGroupedDataset<K, U>, CoGroupFunction<K, V, U, R>, Encoder<R>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Applies the given function to each group of cogrouped data.
cogroup(JavaPairDStream<K, W>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'cogroup' between RDDs of this DStream and other DStream.
cogroup(JavaPairDStream<K, W>, int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'cogroup' between RDDs of this DStream and other DStream.
cogroup(JavaPairDStream<K, W>, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'cogroup' between RDDs of this DStream and other DStream.
cogroup(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
cogroup(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
cogroup(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
cogroup(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
cogroup(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
cogroup(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
cogroup(DStream<Tuple2<K, W>>, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'cogroup' between RDDs of this DStream and other DStream.
cogroup(DStream<Tuple2<K, W>>, int, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'cogroup' between RDDs of this DStream and other DStream.
cogroup(DStream<Tuple2<K, W>>, Partitioner, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'cogroup' between RDDs of this DStream and other DStream.
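The cogroup entries above all follow the same pattern: for every key seen in either keyed collection, gather the values from both sides. A small RDD-based sketch in Scala, assuming an existing SparkContext named `sc` and made-up sample data:

    // Assumption: an existing SparkContext named `sc`.
    val scores  = sc.parallelize(Seq(("alice", 90), ("bob", 75), ("alice", 85)))
    val classes = sc.parallelize(Seq(("alice", "math"), ("carol", "physics")))

    // For each key in either RDD, collect the values from both sides.
    scores.cogroup(classes).collect().foreach { case (name, (scoreIter, classIter)) =>
      println(s"$name -> scores=${scoreIter.toList}, classes=${classIter.toList}")
    }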
CoGroupedRDD<K> - Class in org.apache.spark.rdd
:: DeveloperApi :: An RDD that cogroups its parents.
CoGroupedRDD(Seq<RDD<? extends Product2<K, ?>>>, Partitioner, ClassTag<K>) - Constructor for class org.apache.spark.rdd.CoGroupedRDD
 
CoGroupFunction<K,V1,V2,R> - Interface in org.apache.spark.api.java.function
A function that returns zero or more output records from each grouping key and its values from 2 Datasets.
col(String) - Method in class org.apache.spark.sql.Dataset
Selects a column based on the column name and returns it as a Column.
col(String) - Static method in class org.apache.spark.sql.functions
Returns a Column based on the given column name.
colIter() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
colIter() - Method in interface org.apache.spark.ml.linalg.Matrix
Returns an iterator of column vectors.
colIter() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
colIter() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
colIter() - Method in interface org.apache.spark.mllib.linalg.Matrix
Returns an iterator of column vectors.
colIter() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
collect() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
collect() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
collect() - Static method in class org.apache.spark.api.java.JavaRDD
 
collect() - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an array that contains all of the elements in this RDD.
collect() - Static method in class org.apache.spark.api.r.RRDD
 
collect(PartialFunction<T, U>, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
collect() - Static method in class org.apache.spark.graphx.EdgeRDD
 
collect(PartialFunction<T, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
collect() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
collect() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
collect(PartialFunction<T, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
collect() - Static method in class org.apache.spark.graphx.VertexRDD
 
collect(PartialFunction<T, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
collect() - Static method in class org.apache.spark.rdd.HadoopRDD
 
collect(PartialFunction<T, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
collect() - Static method in class org.apache.spark.rdd.JdbcRDD
 
collect(PartialFunction<T, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
collect() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
collect(PartialFunction<T, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
collect() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
collect(PartialFunction<T, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
collect() - Method in class org.apache.spark.rdd.RDD
Return an array that contains all of the elements in this RDD.
collect(PartialFunction<T, U>, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Return an RDD that contains all matching values by applying f.
collect() - Method in class org.apache.spark.sql.Dataset
Returns an array that contains all of the Rows in this Dataset.
collect(PartialFunction<BaseType, B>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
collect(PartialFunction<A, B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
collect_list(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns a list of objects with duplicates.
collect_list(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns a list of objects with duplicates.
collect_set(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns a set of objects with duplicate elements eliminated.
collect_set(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns a set of objects with duplicate elements eliminated.
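To make the difference between the two aggregate functions concrete, a short hypothetical Scala example (an existing SparkSession named `spark`, with its implicits imported, is assumed):

    import org.apache.spark.sql.functions.{collect_list, collect_set}
    import spark.implicits._   // assumes a SparkSession named `spark`

    val df = Seq(("fruit", "apple"), ("fruit", "apple"), ("fruit", "pear")).toDF("category", "item")
    df.groupBy($"category")
      .agg(collect_list($"item").as("withDuplicates"),   // [apple, apple, pear]
           collect_set($"item").as("deduplicated"))      // [apple, pear]
      .show(false)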
collectAsList() - Method in class org.apache.spark.sql.Dataset
Returns a Java list that contains all of the Rows in this Dataset.
collectAsMap() - Method in class org.apache.spark.api.java.JavaPairRDD
Return the key-value pairs in this RDD to the master as a Map.
collectAsMap() - Method in class org.apache.spark.rdd.PairRDDFunctions
Return the key-value pairs in this RDD to the master as a Map.
collectAsync() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
collectAsync() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
collectAsync() - Static method in class org.apache.spark.api.java.JavaRDD
 
collectAsync() - Method in interface org.apache.spark.api.java.JavaRDDLike
The asynchronous version of collect, which returns a future for retrieving an array containing all of the elements in this RDD.
collectAsync() - Method in class org.apache.spark.rdd.AsyncRDDActions
Returns a future for retrieving all elements of this RDD.
collectEdges(EdgeDirection) - Method in class org.apache.spark.graphx.GraphOps
Returns an RDD that contains for each vertex v its local edges, i.e., the edges that are incident on v, in the user-specified direction.
collectFirst(PartialFunction<BaseType, B>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
collectFirst(PartialFunction<A, B>) - Static method in class org.apache.spark.sql.types.StructType
 
CollectionsUtils - Class in org.apache.spark.util
 
CollectionsUtils() - Constructor for class org.apache.spark.util.CollectionsUtils
 
collectNeighborIds(EdgeDirection) - Method in class org.apache.spark.graphx.GraphOps
Collect the neighbor vertex ids for each vertex.
collectNeighbors(EdgeDirection) - Method in class org.apache.spark.graphx.GraphOps
Collect the neighbor vertex attributes for each vertex.
collectPartitions(int[]) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
collectPartitions(int[]) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
collectPartitions(int[]) - Static method in class org.apache.spark.api.java.JavaRDD
 
collectPartitions(int[]) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an array that contains all of the elements in a specific partition of this RDD.
collectToPython() - Method in class org.apache.spark.sql.Dataset
 
colPtrs() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
colPtrs() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
colsPerBlock() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
 
colStats(RDD<Vector>) - Static method in class org.apache.spark.mllib.stat.Statistics
Computes column-wise summary statistics for the input RDD[Vector].
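A minimal sketch of colStats, assuming an existing SparkContext named `sc` and toy data; the returned summary exposes per-column statistics such as mean and variance:

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.stat.Statistics

    // Assumption: an existing SparkContext named `sc`.
    val observations = sc.parallelize(Seq(
      Vectors.dense(1.0, 10.0),
      Vectors.dense(2.0, 20.0),
      Vectors.dense(3.0, 30.0)))

    val summary = Statistics.colStats(observations)   // one pass over the data
    println(summary.mean)         // per-column means
    println(summary.variance)     // per-column variances
    println(summary.numNonzeros)  // per-column non-zero counts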
Column - Class in org.apache.spark.sql.catalog
 
Column(String, String, String, boolean, boolean, boolean) - Constructor for class org.apache.spark.sql.catalog.Column
 
Column - Class in org.apache.spark.sql
:: Experimental :: A column that will be computed based on the data in a DataFrame.
Column(Expression) - Constructor for class org.apache.spark.sql.Column
 
Column(String) - Constructor for class org.apache.spark.sql.Column
 
column(String) - Static method in class org.apache.spark.sql.functions
Returns a Column based on the given column name.
ColumnName - Class in org.apache.spark.sql
:: Experimental :: A convenient class used for constructing a schema.
ColumnName(String) - Constructor for class org.apache.spark.sql.ColumnName
 
ColumnPruner - Class in org.apache.spark.ml.feature
Utility transformer for removing temporary columns from a DataFrame.
ColumnPruner(String, Set<String>) - Constructor for class org.apache.spark.ml.feature.ColumnPruner
 
ColumnPruner(Set<String>) - Constructor for class org.apache.spark.ml.feature.ColumnPruner
 
columns() - Method in class org.apache.spark.sql.Dataset
Returns all column names as an array.
columnSimilarities() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
Compute all cosine similarities between columns of this matrix using the brute-force approach of computing normalized dot products.
columnSimilarities() - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Compute all cosine similarities between columns of this matrix using the brute-force approach of computing normalized dot products.
columnSimilarities(double) - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Compute similarities between columns of this matrix using a sampling approach.
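The exact and sampled variants of columnSimilarities can be sketched in a few lines; an existing SparkContext named `sc`, the toy rows, and the threshold value of 0.1 are assumptions made for illustration:

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.linalg.distributed.RowMatrix

    val rows = sc.parallelize(Seq(           // assumes a SparkContext named `sc`
      Vectors.dense(1.0, 0.0, 2.0),
      Vectors.dense(0.0, 3.0, 4.0)))
    val mat = new RowMatrix(rows)

    val exact  = mat.columnSimilarities()      // brute-force cosine similarities
    val approx = mat.columnSimilarities(0.1)   // sampling approach with threshold 0.1
    exact.entries.collect().foreach(println)   // entries of the resulting CoordinateMatrix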
columnsToPrune() - Method in class org.apache.spark.ml.feature.ColumnPruner
 
combinations(int) - Static method in class org.apache.spark.sql.types.StructType
 
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner, boolean, Serializer) - Method in class org.apache.spark.api.java.JavaPairRDD
Generic function to combine the elements for each key using a custom set of aggregation functions.
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Generic function to combine the elements for each key using a custom set of aggregation functions.
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Simplified version of combineByKey that hash-partitions the output RDD and uses map-side aggregation.
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>) - Method in class org.apache.spark.api.java.JavaPairRDD
Simplified version of combineByKey that hash-partitions the resulting RDD using the existing partitioner/parallelism level and using map-side aggregation.
combineByKey(Function1<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner, boolean, Serializer) - Method in class org.apache.spark.rdd.PairRDDFunctions
Generic function to combine the elements for each key using a custom set of aggregation functions.
combineByKey(Function1<V, C>, Function2<C, V, C>, Function2<C, C, C>, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
Simplified version of combineByKeyWithClassTag that hash-partitions the output RDD.
combineByKey(Function1<V, C>, Function2<C, V, C>, Function2<C, C, C>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Simplified version of combineByKeyWithClassTag that hash-partitions the resulting RDD using the existing partitioner/parallelism level.
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Combine elements of each key in DStream's RDDs using custom functions.
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner, boolean) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Combine elements of each key in DStream's RDDs using custom functions.
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner, boolean) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
combineByKey(Function<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner, boolean) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
combineByKey(Function1<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner, boolean, ClassTag<C>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Combine elements of each key in DStream's RDDs using custom functions.
combineByKeyWithClassTag(Function1<V, C>, Function2<C, V, C>, Function2<C, C, C>, Partitioner, boolean, Serializer, ClassTag<C>) - Method in class org.apache.spark.rdd.PairRDDFunctions
:: Experimental :: Generic function to combine the elements for each key using a custom set of aggregation functions.
combineByKeyWithClassTag(Function1<V, C>, Function2<C, V, C>, Function2<C, C, C>, int, ClassTag<C>) - Method in class org.apache.spark.rdd.PairRDDFunctions
:: Experimental :: Simplified version of combineByKeyWithClassTag that hash-partitions the output RDD.
combineByKeyWithClassTag(Function1<V, C>, Function2<C, V, C>, Function2<C, C, C>, ClassTag<C>) - Method in class org.apache.spark.rdd.PairRDDFunctions
:: Experimental :: Simplified version of combineByKeyWithClassTag that hash-partitions the resulting RDD using the existing partitioner/parallelism level.
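The combineByKey family is easiest to read through the classic per-key average, where the combiner is a (sum, count) pair. A hypothetical Scala sketch, assuming an existing SparkContext named `sc`:

    // Assumption: an existing SparkContext named `sc`.
    val scores = sc.parallelize(Seq(("alice", 90.0), ("bob", 75.0), ("alice", 70.0)))

    val sumCount = scores.combineByKey(
      (v: Double) => (v, 1L),                                               // createCombiner
      (acc: (Double, Long), v: Double) => (acc._1 + v, acc._2 + 1L),        // mergeValue
      (a: (Double, Long), b: (Double, Long)) => (a._1 + b._1, a._2 + b._2)) // mergeCombiners

    sumCount.mapValues { case (sum, count) => sum / count }
            .collect().foreach(println)   // (alice,80.0), (bob,75.0)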
combineCombinersByKey(Iterator<? extends Product2<K, C>>, TaskContext) - Method in class org.apache.spark.Aggregator
 
combineValuesByKey(Iterator<? extends Product2<K, V>>, TaskContext) - Method in class org.apache.spark.Aggregator
 
combiningStrategy() - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
combiningStrategy() - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
commit(Function0<Parsers.Parser<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
commitTask(OutputCommitter, TaskAttemptContext, int, int) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
Commits a task output.
commonHeaderNodes() - Static method in class org.apache.spark.ui.UIUtils
 
companion() - Static method in class org.apache.spark.sql.types.StructType
 
compare(PartitionGroup, PartitionGroup) - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
compare(Option<PartitionGroup>, Option<PartitionGroup>) - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
compare(Decimal) - Method in class org.apache.spark.sql.types.Decimal
 
compare(RDDInfo) - Method in class org.apache.spark.storage.RDDInfo
 
compareTo(A) - Static method in class org.apache.spark.sql.types.Decimal
 
compareTo(A) - Static method in class org.apache.spark.storage.RDDInfo
 
compareTo(SparkShutdownHook) - Method in class org.apache.spark.util.SparkShutdownHook
 
completed() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
completedIndices() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
completedJobs() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
completedStageIndices() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
completedStages() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
completedTasks() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
completionTime() - Method in class org.apache.spark.scheduler.StageInfo
Time when all tasks in the stage completed or when the stage was cancelled.
completionTime() - Method in class org.apache.spark.status.api.v1.JobData
 
completionTime() - Method in class org.apache.spark.status.api.v1.StageData
 
completionTime() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
ComplexFutureAction<T> - Class in org.apache.spark
A FutureAction for actions that could trigger multiple Spark jobs.
ComplexFutureAction(Function1<JobSubmitter, Future<T>>) - Constructor for class org.apache.spark.ComplexFutureAction
 
compose(Function1<A, T1>) - Static method in class org.apache.spark.sql.types.StructType
 
compressed() - Static method in class org.apache.spark.ml.linalg.DenseVector
 
compressed() - Static method in class org.apache.spark.ml.linalg.SparseVector
 
compressed() - Method in interface org.apache.spark.ml.linalg.Vector
Returns a vector in either dense or sparse format, whichever uses less storage.
compressed() - Static method in class org.apache.spark.mllib.linalg.DenseVector
 
compressed() - Static method in class org.apache.spark.mllib.linalg.SparseVector
 
compressed() - Method in interface org.apache.spark.mllib.linalg.Vector
Returns a vector in either dense or sparse format, whichever uses less storage.
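For intuition, compressed simply picks whichever representation is smaller for the given data. A tiny sketch using the ml.linalg vectors listed above:

    import org.apache.spark.ml.linalg.Vectors

    val mostlyZero = Vectors.dense(0.0, 0.0, 0.0, 5.0)
    println(mostlyZero.compressed)   // stored sparsely here, since most entries are zero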
compressedInputStream(InputStream) - Method in interface org.apache.spark.io.CompressionCodec
 
compressedInputStream(InputStream) - Method in class org.apache.spark.io.LZ4CompressionCodec
 
compressedInputStream(InputStream) - Method in class org.apache.spark.io.LZFCompressionCodec
 
compressedInputStream(InputStream) - Method in class org.apache.spark.io.SnappyCompressionCodec
 
compressedOutputStream(OutputStream) - Method in interface org.apache.spark.io.CompressionCodec
 
compressedOutputStream(OutputStream) - Method in class org.apache.spark.io.LZ4CompressionCodec
 
compressedOutputStream(OutputStream) - Method in class org.apache.spark.io.LZFCompressionCodec
 
compressedOutputStream(OutputStream) - Method in class org.apache.spark.io.SnappyCompressionCodec
 
CompressionCodec - Interface in org.apache.spark.io
:: DeveloperApi :: CompressionCodec allows the customization of choosing different compression implementations to be used in block storage.
compute(Partition, TaskContext) - Method in class org.apache.spark.api.r.BaseRRDD
 
compute(Partition, TaskContext) - Static method in class org.apache.spark.api.r.RRDD
 
compute(Partition, TaskContext) - Method in class org.apache.spark.graphx.EdgeRDD
 
compute(Partition, TaskContext) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
compute(Partition, TaskContext) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
compute(Partition, TaskContext) - Method in class org.apache.spark.graphx.VertexRDD
Provides the RDD[(VertexId, VD)] equivalent output.
compute(Vector, double, Vector) - Method in class org.apache.spark.mllib.optimization.Gradient
Compute the gradient and loss given the features of a single data point.
compute(Vector, double, Vector, Vector) - Method in class org.apache.spark.mllib.optimization.Gradient
Compute the gradient and loss given the features of a single data point, add the gradient to a provided vector to avoid creating new objects, and return loss.
compute(Vector, double, Vector) - Method in class org.apache.spark.mllib.optimization.HingeGradient
 
compute(Vector, double, Vector, Vector) - Method in class org.apache.spark.mllib.optimization.HingeGradient
 
compute(Vector, Vector, double, int, double) - Method in class org.apache.spark.mllib.optimization.L1Updater
 
compute(Vector, double, Vector) - Method in class org.apache.spark.mllib.optimization.LeastSquaresGradient
 
compute(Vector, double, Vector, Vector) - Method in class org.apache.spark.mllib.optimization.LeastSquaresGradient
 
compute(Vector, double, Vector, Vector) - Method in class org.apache.spark.mllib.optimization.LogisticGradient
 
compute(Vector, Vector, double, int, double) - Method in class org.apache.spark.mllib.optimization.SimpleUpdater
 
compute(Vector, Vector, double, int, double) - Method in class org.apache.spark.mllib.optimization.SquaredL2Updater
 
compute(Vector, Vector, double, int, double) - Method in class org.apache.spark.mllib.optimization.Updater
Compute an updated value for weights given the gradient, stepSize, iteration number and regularization parameter.
compute(Partition, TaskContext) - Method in class org.apache.spark.rdd.CoGroupedRDD
 
compute(Partition, TaskContext) - Method in class org.apache.spark.rdd.HadoopRDD
 
compute(Partition, TaskContext) - Method in class org.apache.spark.rdd.JdbcRDD
 
compute(Partition, TaskContext) - Method in class org.apache.spark.rdd.NewHadoopRDD
 
compute(Partition, TaskContext) - Method in class org.apache.spark.rdd.PartitionPruningRDD
 
compute(Partition, TaskContext) - Method in class org.apache.spark.rdd.RDD
:: DeveloperApi :: Implemented by subclasses to compute a given partition.
compute(Partition, TaskContext) - Method in class org.apache.spark.rdd.ShuffledRDD
 
compute(Partition, TaskContext) - Method in class org.apache.spark.rdd.UnionRDD
 
compute(Time) - Method in class org.apache.spark.streaming.api.java.JavaDStream
Generate an RDD for the given duration
compute(Time) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
compute(Time) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Method that generates an RDD for the given Duration
compute(Time) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
compute(Time) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
compute(Time) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
compute(Time) - Method in class org.apache.spark.streaming.dstream.ConstantInputDStream
 
compute(Time) - Method in class org.apache.spark.streaming.dstream.DStream
Method that generates an RDD for the given time
compute(Time) - Method in class org.apache.spark.streaming.dstream.ReceiverInputDStream
 
computeColumnSummaryStatistics() - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Computes column-wise summary statistics.
computeCorrelation(RDD<Object>, RDD<Object>) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
Compute the Pearson correlation for two datasets.
computeCorrelation(RDD<Object>, RDD<Object>) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
Compute Spearman's correlation for two datasets.
computeCorrelationMatrix(RDD<Vector>) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
Compute the Pearson correlation matrix S, for the input matrix, where S(i, j) is the correlation between column i and j.
computeCorrelationMatrix(RDD<Vector>) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
Compute Spearman's correlation matrix S, for the input matrix, where S(i, j) is the correlation between column i and j.
computeCorrelationMatrixFromCovariance(Matrix) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
Compute the Pearson correlation matrix from the covariance matrix.
computeCorrelationWithMatrixImpl(RDD<Object>, RDD<Object>) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
computeCorrelationWithMatrixImpl(RDD<Object>, RDD<Object>) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
computeCost(Dataset<?>) - Method in class org.apache.spark.ml.clustering.BisectingKMeansModel
Computes the sum of squared distances between the input points and their corresponding cluster centers.
computeCost(Dataset<?>) - Method in class org.apache.spark.ml.clustering.KMeansModel
Return the K-means cost (sum of squared distances of points to their nearest center) for this model on the given data.
computeCost(Vector) - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
Computes the squared distance between the input point and the cluster center it belongs to.
computeCost(RDD<Vector>) - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
Computes the sum of squared distances between the input points and their corresponding cluster centers.
computeCost(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
Java-friendly version of computeCost().
computeCost(RDD<Vector>) - Method in class org.apache.spark.mllib.clustering.KMeansModel
Return the K-means cost (sum of squared distances of points to their nearest center) for this model on the given data.
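A brief sketch of the ml KMeansModel.computeCost entry above, with an assumed local SparkSession and made-up feature vectors; the cost is the sum of squared distances of points to their nearest centers:

    import org.apache.spark.ml.clustering.KMeans
    import org.apache.spark.ml.linalg.Vectors
    import org.apache.spark.sql.SparkSession

    // Assumption: a local SparkSession created just for this sketch.
    val spark = SparkSession.builder().master("local[*]").appName("kmeans-cost-sketch").getOrCreate()

    val data = spark.createDataFrame(Seq(
      Tuple1(Vectors.dense(0.0, 0.0)),
      Tuple1(Vectors.dense(1.0, 1.0)),
      Tuple1(Vectors.dense(9.0, 8.0)),
      Tuple1(Vectors.dense(8.0, 9.0)))).toDF("features")

    val model = new KMeans().setK(2).setSeed(1L).fit(data)
    println(model.computeCost(data))   // sum of squared distances to assigned centers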
computeCovariance() - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Computes the covariance matrix, treating each row as an observation.
computeError(RDD<LabeledPoint>, DecisionTreeRegressionModel[], double[], Loss) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
Method to calculate error of the base learner for the gradient boosting calculation.
computeError(org.apache.spark.mllib.tree.model.TreeEnsembleModel, RDD<LabeledPoint>) - Method in interface org.apache.spark.mllib.tree.loss.Loss
Method to calculate error of the base learner for the gradient boosting calculation.
computeError(double, double) - Method in interface org.apache.spark.mllib.tree.loss.Loss
Method to calculate loss when the predictions are already known.
computeFractionForSampleSize(int, long, boolean) - Static method in class org.apache.spark.util.random.SamplingUtils
Returns a sampling rate that guarantees a sample of size >= sampleSizeLowerBound 99.99% of the time.
computeGramianMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
Computes the Gramian matrix A^T A.
computeGramianMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Computes the Gramian matrix A^T A.
computeInitialPredictionAndError(RDD<LabeledPoint>, double, DecisionTreeRegressionModel, Loss) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
Compute the initial predictions and errors for a dataset for the first iteration of gradient boosting.
computeInitialPredictionAndError(RDD<LabeledPoint>, double, DecisionTreeModel, Loss) - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
:: DeveloperApi :: Compute the initial predictions and errors for a dataset for the first iteration of gradient boosting.
computePreferredLocations(Seq<InputFormatInfo>) - Static method in class org.apache.spark.scheduler.InputFormatInfo
Computes the preferred locations based on input(s) and returns a location-to-block map.
computePrincipalComponents(int) - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Computes the top k principal components only.
computePrincipalComponentsAndExplainedVariance(int) - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Computes the top k principal components and a vector of proportions of variance explained by each principal component.
computeSVD(int, boolean, double) - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
Computes the singular value decomposition of this IndexedRowMatrix.
computeSVD(int, boolean, double) - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Computes singular value decomposition of this matrix.
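The RowMatrix decompositions indexed here can be exercised with a few lines; the SparkContext `sc`, the toy rows, and k = 2 are assumptions for the sake of the sketch:

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.linalg.distributed.RowMatrix

    val rows = sc.parallelize(Seq(           // assumes a SparkContext named `sc`
      Vectors.dense(1.0, 0.0, 0.0),
      Vectors.dense(0.0, 2.0, 0.0),
      Vectors.dense(0.0, 0.0, 3.0)))
    val mat = new RowMatrix(rows)

    val svd = mat.computeSVD(2, computeU = true)   // top-2 singular triplets
    println(svd.s)                                 // singular values (local vector)
    println(svd.V)                                 // right singular vectors (local matrix)
    val pcs = mat.computePrincipalComponents(2)    // top-2 principal components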
computeThresholdByKey(Map<K, AcceptanceResult>, Map<K, Object>) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
Given the result returned by getCounts, determine the threshold for accepting items to generate the exact sample size.
concat(Column...) - Static method in class org.apache.spark.sql.functions
Concatenates multiple input string columns together into a single string column.
concat(Seq<Column>) - Static method in class org.apache.spark.sql.functions
Concatenates multiple input string columns together into a single string column.
concat_ws(String, Column...) - Static method in class org.apache.spark.sql.functions
Concatenates multiple input string columns together into a single string column, using the given separator.
concat_ws(String, Seq<Column>) - Static method in class org.apache.spark.sql.functions
Concatenates multiple input string columns together into a single string column, using the given separator.
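A quick contrast between concat and concat_ws, assuming a SparkSession named `spark` with its implicits in scope and made-up names:

    import org.apache.spark.sql.functions.{concat, concat_ws}
    import spark.implicits._   // assumes a SparkSession named `spark`

    val people = Seq(("Ada", "Lovelace"), ("Alan", "Turing")).toDF("first", "last")
    people.select(
        concat($"first", $"last").as("joined"),             // "AdaLovelace"
        concat_ws(" ", $"first", $"last").as("fullName"))   // "Ada Lovelace"
      .show()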
conf() - Method in class org.apache.spark.SparkEnv
 
conf() - Method in class org.apache.spark.sql.SparkSession
Runtime configuration interface for Spark.
conf() - Method in class org.apache.spark.sql.SQLContext
 
confidence() - Method in class org.apache.spark.mllib.fpm.AssociationRules.Rule
Returns the confidence of the rule.
confidence() - Method in class org.apache.spark.partial.BoundedDouble
 
confidence() - Method in class org.apache.spark.util.sketch.CountMinSketch
Returns the confidence (or delta) of this CountMinSketch.
config(String, String) - Method in class org.apache.spark.sql.SparkSession.Builder
Sets a config option.
config(String, long) - Method in class org.apache.spark.sql.SparkSession.Builder
Sets a config option.
config(String, double) - Method in class org.apache.spark.sql.SparkSession.Builder
Sets a config option.
config(String, boolean) - Method in class org.apache.spark.sql.SparkSession.Builder
Sets a config option.
config(SparkConf) - Method in class org.apache.spark.sql.SparkSession.Builder
Sets a list of config options based on the given SparkConf.
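The Builder.config overloads above can be chained fluently; the option keys below are ordinary Spark settings chosen only for illustration:

    import org.apache.spark.SparkConf
    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("config-sketch")
      .config("spark.sql.shuffle.partitions", 4L)       // (String, long) overload
      .config("spark.ui.enabled", false)                // (String, boolean) overload
      .config(new SparkConf().set("spark.serializer",
        "org.apache.spark.serializer.KryoSerializer"))  // copy options from a SparkConf
      .getOrCreate()

    println(spark.conf.get("spark.sql.shuffle.partitions"))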
config() - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
ConfigEntryWithDefault<T> - Class in org.apache.spark.internal.config
 
ConfigEntryWithDefault(String, T, Function1<String, T>, Function1<T, String>, String, boolean) - Constructor for class org.apache.spark.internal.config.ConfigEntryWithDefault
 
ConfigHelpers - Class in org.apache.spark.internal.config
 
ConfigHelpers() - Constructor for class org.apache.spark.internal.config.ConfigHelpers
 
configTestLog4j(String) - Static method in class org.apache.spark.util.Utils
Configure a log4j properties file used for the test suite.
configuration() - Method in class org.apache.spark.scheduler.InputFormatInfo
 
CONFIGURATION_INSTANTIATION_LOCK() - Static method in class org.apache.spark.rdd.HadoopRDD
Configuration's constructor is not threadsafe (see SPARK-1097 and HADOOP-10456).
CONFIGURATION_INSTANTIATION_LOCK() - Static method in class org.apache.spark.rdd.NewHadoopRDD
Configuration's constructor is not threadsafe (see SPARK-1097 and HADOOP-10456).
configureJobPropertiesForStorageHandler(TableDesc, JobConf, boolean) - Static method in class org.apache.spark.sql.hive.HiveTableUtil
 
confusionMatrix() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns the confusion matrix: predicted classes are in columns, ordered by ascending class label, as in "labels".
connect(String, int) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
connectedComponents() - Method in class org.apache.spark.graphx.GraphOps
Compute the connected component membership of each vertex and return a graph with the vertex value containing the lowest vertex id in the connected component containing that vertex.
connectedComponents(int) - Method in class org.apache.spark.graphx.GraphOps
Compute the connected component membership of each vertex and return a graph with the vertex value containing the lowest vertex id in the connected component containing that vertex.
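A tiny GraphX sketch of connectedComponents, assuming a SparkContext named `sc` and a made-up graph with two disjoint pairs of vertices:

    import org.apache.spark.graphx.{Edge, Graph}

    val vertices = sc.parallelize(Seq((1L, "a"), (2L, "b"), (3L, "c"), (4L, "d")))
    val edges    = sc.parallelize(Seq(Edge(1L, 2L, 1), Edge(3L, 4L, 1)))
    val graph    = Graph(vertices, edges)

    // Each vertex ends up labelled with the lowest vertex id in its component.
    graph.connectedComponents().vertices.collect().foreach(println)
    // (1,1), (2,1), (3,3), (4,3)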
ConnectedComponents - Class in org.apache.spark.graphx.lib
Connected components algorithm.
ConnectedComponents() - Constructor for class org.apache.spark.graphx.lib.ConnectedComponents
 
connectLeader(String, int) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
consequent() - Method in class org.apache.spark.mllib.fpm.AssociationRules.Rule
 
ConstantInputDStream<T> - Class in org.apache.spark.streaming.dstream
An input stream that always returns the same RDD on each time step.
ConstantInputDStream(StreamingContext, RDD<T>, ClassTag<T>) - Constructor for class org.apache.spark.streaming.dstream.ConstantInputDStream
 
constraints() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
constructTree(org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0.NodeData[]) - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$
Given a list of nodes from a tree, construct the tree.
constructTrees(RDD<org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0.NodeData>) - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$
 
constructURIForAuthentication(URI, org.apache.spark.SecurityManager) - Static method in class org.apache.spark.util.Utils
Construct a URI containing information used for authentication.
contains(Param<?>) - Method in class org.apache.spark.ml.param.ParamMap
Checks whether a parameter is explicitly specified.
contains(String) - Method in class org.apache.spark.SparkConf
Does the configuration contain a given parameter?
contains(Object) - Method in class org.apache.spark.sql.Column
Contains the other element.
contains(String) - Method in class org.apache.spark.sql.RuntimeConfig
Returns whether a particular key is set.
contains(String) - Method in class org.apache.spark.sql.types.Metadata
Tests whether this Metadata contains a binding for a key.
contains(A1) - Static method in class org.apache.spark.sql.types.StructType
 
containsBlock(BlockId) - Method in class org.apache.spark.storage.StorageStatus
Return whether the given block is stored in this block manager in O(1) time.
containsCachedMetadata(String) - Static method in class org.apache.spark.rdd.HadoopRDD
 
containsChild() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
containsNull() - Method in class org.apache.spark.sql.types.ArrayType
 
containsSlice(GenSeq<B>) - Static method in class org.apache.spark.sql.types.StructType
 
contentType() - Method in class org.apache.spark.ui.JettyUtils.ServletParams
 
context() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
context() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
context() - Static method in class org.apache.spark.api.java.JavaRDD
 
context() - Method in interface org.apache.spark.api.java.JavaRDDLike
The SparkContext that this RDD was created on.
context() - Static method in class org.apache.spark.api.r.RRDD
 
context() - Static method in class org.apache.spark.graphx.EdgeRDD
 
context() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
context() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
context() - Static method in class org.apache.spark.graphx.VertexRDD
 
context() - Method in class org.apache.spark.InterruptibleIterator
 
context(SQLContext) - Static method in class org.apache.spark.ml.r.RWrappers
 
context(SQLContext) - Method in class org.apache.spark.ml.util.MLReader
 
context(SQLContext) - Method in class org.apache.spark.ml.util.MLWriter
 
context() - Static method in class org.apache.spark.rdd.HadoopRDD
 
context() - Static method in class org.apache.spark.rdd.JdbcRDD
 
context() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
context() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
context() - Method in class org.apache.spark.rdd.RDD
The SparkContext that this RDD was created on.
context() - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
context() - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return the StreamingContext associated with this DStream
context() - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
context() - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
context() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
context() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
context() - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
context() - Method in class org.apache.spark.streaming.dstream.DStream
Return the StreamingContext associated with this DStream
Continuous() - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
ContinuousQuery - Interface in org.apache.spark.sql
:: Experimental :: A handle to a query that is executing continuously in the background as new data arrives.
ContinuousQueryException - Exception in org.apache.spark.sql
:: Experimental :: Exception that stopped a ContinuousQuery.
ContinuousQueryListener - Class in org.apache.spark.sql.util
:: Experimental :: Interface for listening to events related to ContinuousQueries.
ContinuousQueryListener() - Constructor for class org.apache.spark.sql.util.ContinuousQueryListener
 
ContinuousQueryListener.Event - Interface in org.apache.spark.sql.util
Base type of ContinuousQueryListener events
ContinuousQueryListener.QueryProgress - Class in org.apache.spark.sql.util
Event representing any progress updates in a query
ContinuousQueryListener.QueryStarted - Class in org.apache.spark.sql.util
Event representing the start of a query
ContinuousQueryListener.QueryTerminated - Class in org.apache.spark.sql.util
Event representing the termination of a query
ContinuousQueryManager - Class in org.apache.spark.sql
:: Experimental :: A class to manage all the ContinuousQueries active on a SparkSession.
ContinuousQueryManager(SparkSession) - Constructor for class org.apache.spark.sql.ContinuousQueryManager
 
ContinuousSplit - Class in org.apache.spark.ml.tree
:: DeveloperApi :: Split which tests a continuous feature.
conv(Column, int, int) - Static method in class org.apache.spark.sql.functions
Convert a number in a string column from one base to another.
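For illustration, a minimal Java sketch of functions.conv; the Dataset df and its string column n of base-10 digits are hypothetical:

    import static org.apache.spark.sql.functions.*;

    // assumes an existing Dataset<Row> df with a string column "n" holding decimal digits
    df.select(conv(col("n"), 10, 16).alias("hex")).show();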
CONVERT_CTAS() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
CONVERT_METASTORE_ORC() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
CONVERT_METASTORE_PARQUET() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
CONVERT_METASTORE_PARQUET_WITH_SCHEMA_MERGING() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
convertToCanonicalEdges(Function2<ED, ED, ED>) - Method in class org.apache.spark.graphx.GraphOps
Convert bi-directional edges into uni-directional ones.
convertToTimeUnit(long, TimeUnit) - Static method in class org.apache.spark.streaming.ui.UIUtils
Convert milliseconds to the specified unit.
CoordinateMatrix - Class in org.apache.spark.mllib.linalg.distributed
Represents a matrix in coordinate format.
CoordinateMatrix(RDD<MatrixEntry>, long, long) - Constructor for class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
 
CoordinateMatrix(RDD<MatrixEntry>) - Constructor for class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
Alternative constructor leaving matrix dimensions to be determined automatically.
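A minimal Java sketch of building a CoordinateMatrix from entries and letting the dimensions be inferred; the JavaSparkContext jsc is assumed to already exist and the entries are arbitrary:

    import java.util.Arrays;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.mllib.linalg.distributed.CoordinateMatrix;
    import org.apache.spark.mllib.linalg.distributed.MatrixEntry;

    JavaRDD<MatrixEntry> entries = jsc.parallelize(Arrays.asList(
        new MatrixEntry(0, 0, 1.0), new MatrixEntry(1, 2, 3.0)));
    CoordinateMatrix mat = new CoordinateMatrix(entries.rdd()); // dimensions inferred from the data
    long nRows = mat.numRows(); // 2
    long nCols = mat.numCols(); // 3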
copy(ParamMap) - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.GBTClassificationModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.LogisticRegression
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.NaiveBayes
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.OneVsRest
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.OneVsRestModel
 
copy(ParamMap) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
copy(ParamMap) - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
copy(ParamMap) - Method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
copy(ParamMap) - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.clustering.KMeans
 
copy(ParamMap) - Method in class org.apache.spark.ml.clustering.KMeansModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.clustering.LDA
 
copy(ParamMap) - Method in class org.apache.spark.ml.clustering.LocalLDAModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.Estimator
 
copy(ParamMap) - Method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
copy(ParamMap) - Method in class org.apache.spark.ml.evaluation.Evaluator
 
copy(ParamMap) - Method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
copy(ParamMap) - Method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.Binarizer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.Bucketizer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.ChiSqSelector
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.ColumnPruner
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.CountVectorizer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
copy(ParamMap) - Static method in class org.apache.spark.ml.feature.DCT
 
copy(ParamMap) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.HashingTF
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.IDF
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.IDFModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.IndexToString
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.Interaction
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.MaxAbsScaler
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.MinMaxScaler
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
copy(ParamMap) - Static method in class org.apache.spark.ml.feature.NGram
 
copy(ParamMap) - Static method in class org.apache.spark.ml.feature.Normalizer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.OneHotEncoder
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.PCA
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.PCAModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.PolynomialExpansion
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.RFormula
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.RFormulaModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.SQLTransformer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.StandardScaler
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.StandardScalerModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.StringIndexer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.StringIndexerModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.Tokenizer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.VectorAssembler
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.VectorIndexer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.VectorIndexerModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.VectorSlicer
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.Word2Vec
 
copy(ParamMap) - Method in class org.apache.spark.ml.feature.Word2VecModel
 
copy(Vector, Vector) - Static method in class org.apache.spark.ml.linalg.BLAS
y = x
copy() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
copy() - Method in class org.apache.spark.ml.linalg.DenseVector
 
copy() - Method in interface org.apache.spark.ml.linalg.Matrix
Get a deep copy of the matrix.
copy() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
copy() - Method in class org.apache.spark.ml.linalg.SparseVector
 
copy() - Method in interface org.apache.spark.ml.linalg.Vector
Makes a deep copy of this vector.
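A small Java sketch of the deep-copy semantics of Vector.copy; the values are arbitrary:

    import org.apache.spark.ml.linalg.Vector;
    import org.apache.spark.ml.linalg.Vectors;

    Vector v = Vectors.dense(1.0, 2.0, 3.0);
    Vector w = v.copy(); // deep copy: later changes to one vector do not affect the other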
copy(ParamMap) - Method in class org.apache.spark.ml.Model
 
copy() - Method in class org.apache.spark.ml.param.ParamMap
Creates a copy of this param map.
copy(ParamMap) - Method in interface org.apache.spark.ml.param.Params
Creates a copy of this instance with the same UID and some extra params.
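A minimal Java sketch of Params.copy, which keeps the UID and overlays the extra params; LogisticRegression is used here only as an example Params instance, and the param values are arbitrary:

    import org.apache.spark.ml.classification.LogisticRegression;
    import org.apache.spark.ml.param.ParamMap;

    LogisticRegression lr = new LogisticRegression().setMaxIter(10);
    ParamMap extra = new ParamMap().put(lr.regParam().w(0.1));
    LogisticRegression lr2 = lr.copy(extra); // same uid(), maxIter kept, regParam now set to 0.1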
copy(ParamMap) - Method in class org.apache.spark.ml.Pipeline
 
copy(ParamMap) - Method in class org.apache.spark.ml.PipelineModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.PipelineStage
 
copy(ParamMap) - Method in class org.apache.spark.ml.Predictor
 
copy(ParamMap) - Method in class org.apache.spark.ml.recommendation.ALS
 
copy(ParamMap) - Method in class org.apache.spark.ml.recommendation.ALSModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.GBTRegressionModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.LinearRegression
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.LinearRegressionModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
copy(ParamMap) - Method in class org.apache.spark.ml.Transformer
 
copy(ParamMap) - Method in class org.apache.spark.ml.tuning.CrossValidator
 
copy(ParamMap) - Method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
copy(ParamMap) - Method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
copy(ParamMap) - Method in class org.apache.spark.ml.UnaryTransformer
 
copy(Vector, Vector) - Static method in class org.apache.spark.mllib.linalg.BLAS
y = x
copy() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
copy() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
copy() - Method in interface org.apache.spark.mllib.linalg.Matrix
Get a deep copy of the matrix.
copy() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
copy() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
copy() - Method in interface org.apache.spark.mllib.linalg.Vector
Makes a deep copy of this vector.
copy() - Method in class org.apache.spark.mllib.random.ExponentialGenerator
 
copy() - Method in class org.apache.spark.mllib.random.GammaGenerator
 
copy() - Method in class org.apache.spark.mllib.random.LogNormalGenerator
 
copy() - Method in class org.apache.spark.mllib.random.PoissonGenerator
 
copy() - Method in interface org.apache.spark.mllib.random.RandomDataGenerator
Returns a copy of the RandomDataGenerator with a new instance of the rng object used in the class, where applicable, to allow non-locking concurrent usage.
copy() - Method in class org.apache.spark.mllib.random.StandardNormalGenerator
 
copy() - Method in class org.apache.spark.mllib.random.UniformGenerator
 
copy() - Method in class org.apache.spark.mllib.random.WeibullGenerator
 
copy() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
Returns a shallow copy of this instance.
copy(Kryo, T) - Static method in class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
copy() - Method in interface org.apache.spark.sql.Row
Make a copy of the current Row object.
copy() - Method in class org.apache.spark.util.AccumulatorV2
Creates a new copy of this accumulator.
copy() - Method in class org.apache.spark.util.DoubleAccumulator
 
copy() - Method in class org.apache.spark.util.LegacyAccumulatorWrapper
 
copy() - Method in class org.apache.spark.util.ListAccumulator
 
copy() - Method in class org.apache.spark.util.LongAccumulator
 
copy() - Method in class org.apache.spark.util.StatCounter
Clone this StatCounter
copyAndReset() - Method in class org.apache.spark.util.AccumulatorV2
Creates a new copy of this accumulator, which has the zero value.
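A short Java sketch of the difference between copy and copyAndReset; the LongAccumulator is constructed locally here just to show the semantics:

    import org.apache.spark.util.AccumulatorV2;
    import org.apache.spark.util.LongAccumulator;

    LongAccumulator acc = new LongAccumulator();
    acc.add(5L);
    AccumulatorV2<Long, Long> snapshot = acc.copy();         // independent copy holding 5
    AccumulatorV2<Long, Long> zeroed   = acc.copyAndReset(); // independent copy holding the zero value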
copyAndReset() - Method in class org.apache.spark.util.ListAccumulator
 
copyStream(InputStream, OutputStream, boolean, boolean) - Static method in class org.apache.spark.util.Utils
Copy all data from an InputStream to an OutputStream.
copyToArray(Object, int) - Static method in class org.apache.spark.sql.types.StructType
 
copyToArray(Object) - Static method in class org.apache.spark.sql.types.StructType
 
copyToArray(Object, int, int) - Static method in class org.apache.spark.sql.types.StructType
 
copyToBuffer(Buffer<B>) - Static method in class org.apache.spark.sql.types.StructType
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.clustering.KMeans
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.clustering.LDA
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.Binarizer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.DCT
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.HashingTF
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.IDF
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.IDFModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.IndexToString
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.Interaction
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.NGram
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.Normalizer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.PCA
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.PCAModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.RFormula
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
copyValues(T, ParamMap) - Method in interface org.apache.spark.ml.param.Params
Copies param values from this instance to another instance for params shared by them.
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.Pipeline
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.PipelineModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.recommendation.ALS
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
copyValues(T, ParamMap) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
copyValues$default$2() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
copyValues$default$2() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
copyValues$default$2() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.clustering.KMeans
 
copyValues$default$2() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.clustering.LDA
 
copyValues$default$2() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
copyValues$default$2() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
copyValues$default$2() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.Binarizer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.DCT
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.HashingTF
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.IDF
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.IDFModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.IndexToString
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.Interaction
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.NGram
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.Normalizer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.PCA
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.PCAModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.RFormula
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
copyValues$default$2() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.Pipeline
 
copyValues$default$2() - Static method in class org.apache.spark.ml.PipelineModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.recommendation.ALS
 
copyValues$default$2() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
copyValues$default$2() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
copyValues$default$2() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
copyValues$default$2() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
copyValues$default$2() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
cores() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutor
 
coresGranted() - Method in class org.apache.spark.status.api.v1.ApplicationInfo
 
coresPerExecutor() - Method in class org.apache.spark.status.api.v1.ApplicationInfo
 
corr(RDD<Object>, RDD<Object>, String) - Static method in class org.apache.spark.mllib.stat.correlation.Correlations
 
corr(RDD<Vector>) - Static method in class org.apache.spark.mllib.stat.Statistics
Compute the Pearson correlation matrix for the input RDD of Vectors.
corr(RDD<Vector>, String) - Static method in class org.apache.spark.mllib.stat.Statistics
Compute the correlation matrix for the input RDD of Vectors using the specified method.
corr(RDD<Object>, RDD<Object>) - Static method in class org.apache.spark.mllib.stat.Statistics
Compute the Pearson correlation for the input RDDs.
corr(JavaRDD<Double>, JavaRDD<Double>) - Static method in class org.apache.spark.mllib.stat.Statistics
Java-friendly version of corr()
corr(RDD<Object>, RDD<Object>, String) - Static method in class org.apache.spark.mllib.stat.Statistics
Compute the correlation for the input RDDs using the specified method.
corr(JavaRDD<Double>, JavaRDD<Double>, String) - Static method in class org.apache.spark.mllib.stat.Statistics
Java-friendly version of corr()
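A minimal Java sketch of the Java-friendly Statistics.corr overloads; jsc is an assumed JavaSparkContext and the data are arbitrary:

    import java.util.Arrays;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.mllib.stat.Statistics;

    JavaRDD<Double> x = jsc.parallelize(Arrays.asList(1.0, 2.0, 3.0));
    JavaRDD<Double> y = jsc.parallelize(Arrays.asList(2.0, 4.0, 6.1));
    double pearson  = Statistics.corr(x, y);             // Pearson by default
    double spearman = Statistics.corr(x, y, "spearman"); // method selected by name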
corr(String, String, String) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Calculates the correlation of two columns of a DataFrame.
corr(String, String) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Calculates the Pearson Correlation Coefficient of two columns of a DataFrame.
corr(Column, Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the Pearson Correlation Coefficient for two columns.
corr(String, String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the Pearson Correlation Coefficient for two columns.
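A minimal Java sketch contrasting the DataFrameStatFunctions and aggregate forms of corr; the Dataset df and its numeric columns x and y are hypothetical:

    import static org.apache.spark.sql.functions.*;

    double r = df.stat().corr("x", "y");                      // Pearson correlation as a double
    df.agg(corr(col("x"), col("y")).alias("corr_xy")).show(); // same statistic as an aggregate column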
CorrelationNames - Class in org.apache.spark.mllib.stat.correlation
Maintains supported and default correlation names.
CorrelationNames() - Constructor for class org.apache.spark.mllib.stat.correlation.CorrelationNames
 
Correlations - Class in org.apache.spark.mllib.stat.correlation
Delegates computation to the specific correlation object based on the input method name.
Correlations() - Constructor for class org.apache.spark.mllib.stat.correlation.Correlations
 
corresponds(GenSeq<B>, Function2<A, B, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
corrMatrix(RDD<Vector>, String) - Static method in class org.apache.spark.mllib.stat.correlation.Correlations
 
cos(Column) - Static method in class org.apache.spark.sql.functions
Computes the cosine of the given value.
cos(String) - Static method in class org.apache.spark.sql.functions
Computes the cosine of the given column.
cosh(Column) - Static method in class org.apache.spark.sql.functions
Computes the hyperbolic cosine of the given value.
cosh(String) - Static method in class org.apache.spark.sql.functions
Computes the hyperbolic cosine of the given column.
count() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
count() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
count() - Static method in class org.apache.spark.api.java.JavaRDD
 
count() - Method in interface org.apache.spark.api.java.JavaRDDLike
Return the number of elements in the RDD.
count() - Static method in class org.apache.spark.api.r.RRDD
 
count() - Static method in class org.apache.spark.graphx.EdgeRDD
 
count() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
The number of edges in the RDD.
count() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
The number of vertices in the RDD.
count() - Static method in class org.apache.spark.graphx.VertexRDD
 
count() - Method in class org.apache.spark.ml.regression.AFTAggregator
 
count() - Method in class org.apache.spark.ml.regression.LeastSquaresAggregator
 
count() - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
Sample size.
count() - Method in interface org.apache.spark.mllib.stat.MultivariateStatisticalSummary
Sample size.
count() - Static method in class org.apache.spark.rdd.HadoopRDD
 
count() - Static method in class org.apache.spark.rdd.JdbcRDD
 
count() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
count() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
count() - Method in class org.apache.spark.rdd.RDD
Return the number of elements in the RDD.
count() - Method in class org.apache.spark.sql.Dataset
Returns the number of rows in the Dataset.
count(MapFunction<T, Object>) - Static method in class org.apache.spark.sql.expressions.javalang.typed
Count aggregate function.
count(Function1<IN, Object>) - Static method in class org.apache.spark.sql.expressions.scalalang.typed
Count aggregate function.
count(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the number of items in a group.
count(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the number of items in a group.
count() - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Returns a Dataset that contains a tuple with each key and the number of items present for that key.
count() - Method in class org.apache.spark.sql.RelationalGroupedDataset
Count the number of rows for each group.
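A minimal Java sketch of the Dataset, grouped, and aggregate forms of count; the Dataset df and its column dept are hypothetical:

    import static org.apache.spark.sql.functions.*;

    long total = df.count();                             // number of rows in the Dataset
    df.groupBy("dept").count().show();                   // rows per group
    df.agg(count(col("dept")).alias("non_null")).show(); // aggregate form; skips nulls in the column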
count(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
count() - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
count() - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD has a single element generated by counting each RDD of this DStream.
count() - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
count() - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
count() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
count() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
count() - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
count() - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD has a single element generated by counting each RDD of this DStream.
count() - Method in class org.apache.spark.streaming.kafka.OffsetRange
Number of messages this OffsetRange refers to
count() - Method in class org.apache.spark.util.DoubleAccumulator
Returns the number of elements added to the accumulator.
count() - Method in class org.apache.spark.util.LongAccumulator
Returns the number of elements added to the accumulator.
count() - Method in class org.apache.spark.util.StatCounter
 
countApprox(long, double) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
countApprox(long) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
countApprox(long, double) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
countApprox(long) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
countApprox(long, double) - Static method in class org.apache.spark.api.java.JavaRDD
 
countApprox(long) - Static method in class org.apache.spark.api.java.JavaRDD
 
countApprox(long, double) - Method in interface org.apache.spark.api.java.JavaRDDLike
Approximate version of count() that returns a potentially incomplete result within a timeout, even if not all tasks have finished.
countApprox(long) - Method in interface org.apache.spark.api.java.JavaRDDLike
Approximate version of count() that returns a potentially incomplete result within a timeout, even if not all tasks have finished.
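A small Java sketch of countApprox; lines is an assumed JavaRDD and the timeout and confidence values are arbitrary:

    import org.apache.spark.partial.BoundedDouble;
    import org.apache.spark.partial.PartialResult;

    PartialResult<BoundedDouble> approx = lines.countApprox(1000L, 0.95); // 1s timeout, 95% confidence
    double estimate = approx.initialValue().mean();                       // best estimate available so far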
countApprox(long, double) - Static method in class org.apache.spark.api.r.RRDD
 
countApprox(long, double) - Static method in class org.apache.spark.graphx.EdgeRDD
 
countApprox(long, double) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countApprox(long, double) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countApprox(long, double) - Static method in class org.apache.spark.graphx.VertexRDD
 
countApprox(long, double) - Static method in class org.apache.spark.rdd.HadoopRDD
 
countApprox(long, double) - Static method in class org.apache.spark.rdd.JdbcRDD
 
countApprox(long, double) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countApprox(long, double) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countApprox(long, double) - Method in class org.apache.spark.rdd.RDD
Approximate version of count() that returns a potentially incomplete result within a timeout, even if not all tasks have finished.
countApprox$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
countApprox$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
countApprox$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countApprox$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countApprox$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
countApprox$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
countApprox$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
countApprox$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countApprox$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.api.java.JavaRDD
 
countApproxDistinct(double) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return approximate number of distinct elements in the RDD.
countApproxDistinct(int, int) - Static method in class org.apache.spark.api.r.RRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.api.r.RRDD
 
countApproxDistinct(int, int) - Static method in class org.apache.spark.graphx.EdgeRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.graphx.EdgeRDD
 
countApproxDistinct(int, int) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countApproxDistinct(double) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countApproxDistinct(int, int) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countApproxDistinct(double) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countApproxDistinct(int, int) - Static method in class org.apache.spark.graphx.VertexRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.graphx.VertexRDD
 
countApproxDistinct(int, int) - Static method in class org.apache.spark.rdd.HadoopRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.rdd.HadoopRDD
 
countApproxDistinct(int, int) - Static method in class org.apache.spark.rdd.JdbcRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.rdd.JdbcRDD
 
countApproxDistinct(int, int) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countApproxDistinct(int, int) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countApproxDistinct(double) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countApproxDistinct(int, int) - Method in class org.apache.spark.rdd.RDD
Return approximate number of distinct elements in the RDD.
countApproxDistinct(double) - Method in class org.apache.spark.rdd.RDD
Return approximate number of distinct elements in the RDD.
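A one-line Java sketch of countApproxDistinct; userIds is an assumed JavaRDD<String> and 0.05 is the requested relative accuracy:

    long approxUsers = userIds.countApproxDistinct(0.05); // HyperLogLog-based estimate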
countApproxDistinct$default$1() - Static method in class org.apache.spark.api.r.RRDD
 
countApproxDistinct$default$1() - Static method in class org.apache.spark.graphx.EdgeRDD
 
countApproxDistinct$default$1() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countApproxDistinct$default$1() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countApproxDistinct$default$1() - Static method in class org.apache.spark.graphx.VertexRDD
 
countApproxDistinct$default$1() - Static method in class org.apache.spark.rdd.HadoopRDD
 
countApproxDistinct$default$1() - Static method in class org.apache.spark.rdd.JdbcRDD
 
countApproxDistinct$default$1() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countApproxDistinct$default$1() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countApproxDistinctByKey(double, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Return approximate number of distinct values for each key in this RDD.
countApproxDistinctByKey(double, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Return approximate number of distinct values for each key in this RDD.
countApproxDistinctByKey(double) - Method in class org.apache.spark.api.java.JavaPairRDD
Return approximate number of distinct values for each key in this RDD.
countApproxDistinctByKey(int, int, Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return approximate number of distinct values for each key in this RDD.
countApproxDistinctByKey(double, Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return approximate number of distinct values for each key in this RDD.
countApproxDistinctByKey(double, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return approximate number of distinct values for each key in this RDD.
countApproxDistinctByKey(double) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return approximate number of distinct values for each key in this RDD.
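A small Java sketch of the per-key variant; events is an assumed JavaPairRDD keyed by user id:

    import org.apache.spark.api.java.JavaPairRDD;

    JavaPairRDD<String, Long> distinctPerKey = events.countApproxDistinctByKey(0.05);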
countAsync() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
countAsync() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
countAsync() - Static method in class org.apache.spark.api.java.JavaRDD
 
countAsync() - Method in interface org.apache.spark.api.java.JavaRDDLike
The asynchronous version of count, which returns a future for counting the number of elements in this RDD.
countAsync() - Method in class org.apache.spark.rdd.AsyncRDDActions
Returns a future for counting the number of elements in the RDD.
countByKey() - Method in class org.apache.spark.api.java.JavaPairRDD
Count the number of elements for each key, and return the result to the master as a Map.
countByKey() - Method in class org.apache.spark.rdd.PairRDDFunctions
Count the number of elements for each key, collecting the results to a local Map.
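A small Java sketch of countByKey; sales is an assumed JavaPairRDD<String, Integer> keyed by product id:

    import java.util.Map;

    Map<String, Long> perProduct = sales.countByKey(); // result is collected to the driver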
countByKeyApprox(long) - Method in class org.apache.spark.api.java.JavaPairRDD
Approximate version of countByKey that can return a partial result if it does not finish within a timeout.
countByKeyApprox(long, double) - Method in class org.apache.spark.api.java.JavaPairRDD
Approximate version of countByKey that can return a partial result if it does not finish within a timeout.
countByKeyApprox(long, double) - Method in class org.apache.spark.rdd.PairRDDFunctions
Approximate version of countByKey that can return a partial result if it does not finish within a timeout.
countByValue() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
countByValue() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
countByValue() - Static method in class org.apache.spark.api.java.JavaRDD
 
countByValue() - Method in interface org.apache.spark.api.java.JavaRDDLike
Return the count of each unique value in this RDD as a map of (value, count) pairs.
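A small Java sketch of countByValue on a JavaRDD; jsc is an assumed JavaSparkContext and the data are arbitrary:

    import java.util.Arrays;
    import java.util.Map;

    Map<String, Long> histogram =
        jsc.parallelize(Arrays.asList("a", "b", "a")).countByValue(); // {a=2, b=1}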
countByValue(Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
countByValue(Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
countByValue(Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countByValue(Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countByValue(Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
countByValue(Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
countByValue(Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
countByValue(Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countByValue(Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countByValue(Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Return the count of each unique value in this RDD as a local map of (value, count) pairs.
countByValue() - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
countByValue(int) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
countByValue() - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD contains the counts of each distinct value in each RDD of this DStream.
countByValue(int) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD contains the counts of each distinct value in each RDD of this DStream.
countByValue() - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
countByValue(int) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
countByValue() - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
countByValue(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
countByValue() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
countByValue(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
countByValue() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
countByValue(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
countByValue() - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
countByValue(int) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
countByValue(int, Ordering<T>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD contains the counts of each distinct value in each RDD of this DStream.
countByValue$default$1() - Static method in class org.apache.spark.api.r.RRDD
 
countByValue$default$1() - Static method in class org.apache.spark.graphx.EdgeRDD
 
countByValue$default$1() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countByValue$default$1() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countByValue$default$1() - Static method in class org.apache.spark.graphx.VertexRDD
 
countByValue$default$1() - Static method in class org.apache.spark.rdd.HadoopRDD
 
countByValue$default$1() - Static method in class org.apache.spark.rdd.JdbcRDD
 
countByValue$default$1() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countByValue$default$1() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countByValueAndWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
countByValueAndWindow(Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
countByValueAndWindow(Duration, Duration) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD contains the count of distinct elements in RDDs in a sliding window over this DStream.
countByValueAndWindow(Duration, Duration, int) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD contains the count of distinct elements in RDDs in a sliding window over this DStream.
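A minimal Java sketch of countByValueAndWindow; words is an assumed JavaDStream<String> from a StreamingContext with a 10-second batch interval, and the window and slide durations are arbitrary:

    import org.apache.spark.streaming.Durations;
    import org.apache.spark.streaming.api.java.JavaPairDStream;

    JavaPairDStream<String, Long> counts =
        words.countByValueAndWindow(Durations.seconds(60), Durations.seconds(10));
    counts.print(); // per-value counts over the last 60 seconds, updated every 10 seconds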
countByValueAndWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
countByValueAndWindow(Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
countByValueAndWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
countByValueAndWindow(Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
countByValueAndWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
countByValueAndWindow(Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
countByValueAndWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
countByValueAndWindow(Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
countByValueAndWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
countByValueAndWindow(Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
countByValueAndWindow(Duration, Duration, int, Ordering<T>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD contains the count of distinct elements in RDDs in a sliding window over this DStream.
countByValueApprox(long, double) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
countByValueApprox(long) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
countByValueApprox(long, double) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
countByValueApprox(long) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
countByValueApprox(long, double) - Static method in class org.apache.spark.api.java.JavaRDD
 
countByValueApprox(long) - Static method in class org.apache.spark.api.java.JavaRDD
 
countByValueApprox(long, double) - Method in interface org.apache.spark.api.java.JavaRDDLike
Approximate version of countByValue().
countByValueApprox(long) - Method in interface org.apache.spark.api.java.JavaRDDLike
Approximate version of countByValue().
countByValueApprox(long, double, Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
countByValueApprox(long, double, Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
countByValueApprox(long, double, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countByValueApprox(long, double, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countByValueApprox(long, double, Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
countByValueApprox(long, double, Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
countByValueApprox(long, double, Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
countByValueApprox(long, double, Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countByValueApprox(long, double, Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countByValueApprox(long, double, Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Approximate version of countByValue().
countByValueApprox$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
countByValueApprox$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
countByValueApprox$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countByValueApprox$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countByValueApprox$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
countByValueApprox$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
countByValueApprox$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
countByValueApprox$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countByValueApprox$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countByValueApprox$default$3(long, double) - Static method in class org.apache.spark.api.r.RRDD
 
countByValueApprox$default$3(long, double) - Static method in class org.apache.spark.graphx.EdgeRDD
 
countByValueApprox$default$3(long, double) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
countByValueApprox$default$3(long, double) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
countByValueApprox$default$3(long, double) - Static method in class org.apache.spark.graphx.VertexRDD
 
countByValueApprox$default$3(long, double) - Static method in class org.apache.spark.rdd.HadoopRDD
 
countByValueApprox$default$3(long, double) - Static method in class org.apache.spark.rdd.JdbcRDD
 
countByValueApprox$default$3(long, double) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
countByValueApprox$default$3(long, double) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
countByWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
countByWindow(Duration, Duration) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD has a single element generated by counting the number of elements in a window over this DStream.
countByWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
countByWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
countByWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
countByWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
countByWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
countByWindow(Duration, Duration) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD has a single element generated by counting the number of elements in a sliding window over this DStream.
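A minimal sketch (assuming an existing StreamingContext ssc and a DStream lines, both hypothetical; window operations with an inverse reduce need checkpointing enabled):
  import org.apache.spark.streaming.Seconds
  ssc.checkpoint("/tmp/checkpoint")  // checkpointing required for this window operation
  // One count per RDD, over a 30-second window sliding every 10 seconds.
  val counts = lines.countByWindow(Seconds(30), Seconds(10))
  counts.print()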
countDistinct(Column, Column...) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the number of distinct items in a group.
countDistinct(String, String...) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the number of distinct items in a group.
countDistinct(Column, Seq<Column>) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the number of distinct items in a group.
countDistinct(String, Seq<String>) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the number of distinct items in a group.
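A minimal sketch of the aggregate (assuming a DataFrame df with hypothetical columns "country" and "city"):
  import org.apache.spark.sql.functions.countDistinct
  // Number of distinct (country, city) pairs across the whole DataFrame.
  df.agg(countDistinct("country", "city")).show()
  // Or per group:
  df.groupBy("country").agg(countDistinct("city")).show()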
countMinSketch(String, int, int, int) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Builds a Count-min Sketch over a specified column.
countMinSketch(String, double, double, int) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Builds a Count-min Sketch over a specified column.
countMinSketch(Column, int, int, int) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Builds a Count-min Sketch over a specified column.
countMinSketch(Column, double, double, int) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Builds a Count-min Sketch over a specified column.
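A minimal sketch of building and querying one (assuming a DataFrame df with a hypothetical column "id"):
  // Build a Count-min Sketch with depth 10, width 1000 and seed 42.
  val cms = df.stat.countMinSketch("id", 10, 1000, 42)
  // Estimated frequency of a value; a Count-min Sketch may over-count but never under-counts.
  val estimate = cms.estimateCount(42)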
CountMinSketch - Class in org.apache.spark.util.sketch
A Count-min sketch is a probabilistic data structure used for summarizing streams of data in sub-linear space.
CountMinSketch() - Constructor for class org.apache.spark.util.sketch.CountMinSketch
 
CountMinSketch.Version - Enum in org.apache.spark.util.sketch
 
countTowardsTaskFailures() - Static method in class org.apache.spark.ExceptionFailure
 
countTowardsTaskFailures() - Method in class org.apache.spark.ExecutorLostFailure
 
countTowardsTaskFailures() - Static method in class org.apache.spark.FetchFailed
 
countTowardsTaskFailures() - Static method in class org.apache.spark.Resubmitted
 
countTowardsTaskFailures() - Method in class org.apache.spark.TaskCommitDenied
If a task failed because its attempt to commit was denied, do not count this failure towards failing the stage.
countTowardsTaskFailures() - Method in interface org.apache.spark.TaskFailedReason
Whether this task failure should be counted towards the maximum number of times the task is allowed to fail before the stage is aborted.
countTowardsTaskFailures() - Static method in class org.apache.spark.TaskKilled
 
countTowardsTaskFailures() - Static method in class org.apache.spark.TaskResultLost
 
countTowardsTaskFailures() - Static method in class org.apache.spark.UnknownReason
 
CountVectorizer - Class in org.apache.spark.ml.feature
:: Experimental :: Extracts a vocabulary from document collections and generates a CountVectorizerModel.
CountVectorizer(String) - Constructor for class org.apache.spark.ml.feature.CountVectorizer
 
CountVectorizer() - Constructor for class org.apache.spark.ml.feature.CountVectorizer
 
CountVectorizerModel - Class in org.apache.spark.ml.feature
:: Experimental :: Converts a text document to a sparse vector of token counts.
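A minimal fit/transform sketch for CountVectorizer and the resulting CountVectorizerModel (assuming a DataFrame docs with a hypothetical array-of-strings column "words"):
  import org.apache.spark.ml.feature.CountVectorizer
  val cv = new CountVectorizer()
    .setInputCol("words")
    .setOutputCol("features")
    .setVocabSize(1000)            // keep at most 1000 terms in the vocabulary
  val cvModel = cv.fit(docs)       // learns the vocabulary -> CountVectorizerModel
  cvModel.transform(docs).show()   // adds a sparse vector of token counts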
CountVectorizerModel(String, String[]) - Constructor for class org.apache.spark.ml.feature.CountVectorizerModel
 
CountVectorizerModel(String[]) - Constructor for class org.apache.spark.ml.feature.CountVectorizerModel
 
cov() - Method in class org.apache.spark.ml.stat.distribution.MultivariateGaussian
 
cov(String, String) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Calculate the sample covariance of two numerical columns of a DataFrame.
covar_pop(Column, Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the population covariance for two columns.
covar_pop(String, String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the population covariance for two columns.
covar_samp(Column, Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the sample covariance for two columns.
covar_samp(String, String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the sample covariance for two columns.
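A minimal sketch (assuming a DataFrame df with hypothetical numeric columns "x" and "y"):
  import org.apache.spark.sql.functions.{covar_pop, covar_samp}
  // Sample covariance via the stat functions helper (returns a Double).
  val c = df.stat.cov("x", "y")
  // Population and sample covariance as aggregate expressions.
  df.agg(covar_pop("x", "y"), covar_samp("x", "y")).show()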
crc32(Column) - Static method in class org.apache.spark.sql.functions
Calculates the cyclic redundancy check value (CRC32) of a binary column and returns the value as a bigint.
CreatableRelationProvider - Interface in org.apache.spark.sql.sources
 
create(boolean, boolean, boolean, boolean, int) - Static method in class org.apache.spark.api.java.StorageLevels
Create a new StorageLevel object.
create(JavaSparkContext, JdbcRDD.ConnectionFactory, String, long, long, int, Function<ResultSet, T>) - Static method in class org.apache.spark.rdd.JdbcRDD
Create an RDD that executes an SQL query on a JDBC connection and reads results.
create(JavaSparkContext, JdbcRDD.ConnectionFactory, String, long, long, int) - Static method in class org.apache.spark.rdd.JdbcRDD
Create an RDD that executes an SQL query on a JDBC connection and reads results.
create(RDD<T>, Function1<Object, Object>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
Create a PartitionPruningRDD.
create(String) - Static method in class org.apache.spark.sql.ProcessingTime
Create a ProcessingTime.
create(long, TimeUnit) - Static method in class org.apache.spark.sql.ProcessingTime
Create a ProcessingTime.
create(Object...) - Static method in class org.apache.spark.sql.RowFactory
Create a Row from the given arguments.
create(String, int) - Static method in class org.apache.spark.streaming.kafka.Broker
 
create(String, int, long, long) - Static method in class org.apache.spark.streaming.kafka.OffsetRange
 
create(TopicAndPartition, long, long) - Static method in class org.apache.spark.streaming.kafka.OffsetRange
 
create(long) - Static method in class org.apache.spark.util.sketch.BloomFilter
Creates a BloomFilter with the expected number of insertions and a default expected false positive probability of 3%.
create(long, double) - Static method in class org.apache.spark.util.sketch.BloomFilter
Creates a BloomFilter with the expected number of insertions and expected false positive probability.
create(long, long) - Static method in class org.apache.spark.util.sketch.BloomFilter
Creates a BloomFilter with the given expectedNumItems and numBits; it will pick an optimal numHashFunctions that minimizes the fpp for the Bloom filter.
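A minimal sketch of creating and probing a Bloom filter (the inserted items are hypothetical):
  import org.apache.spark.util.sketch.BloomFilter
  // Expect roughly 10,000 insertions with a 1% false positive probability.
  val bf = BloomFilter.create(10000L, 0.01)
  bf.put("spark")
  bf.mightContain("spark")   // true: a Bloom filter has no false negatives
  bf.mightContain("flink")   // usually false, but false positives are possible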
create(int, int, int) - Static method in class org.apache.spark.util.sketch.CountMinSketch
Creates a CountMinSketch with given depth, width, and random seed.
create(double, double, int) - Static method in class org.apache.spark.util.sketch.CountMinSketch
Creates a CountMinSketch with given relative error (eps), confidence, and random seed.
createArrayType(DataType) - Static method in class org.apache.spark.sql.types.DataTypes
Creates an ArrayType by specifying the data type of elements (elementType).
createArrayType(DataType, boolean) - Static method in class org.apache.spark.sql.types.DataTypes
Creates an ArrayType by specifying the data type of elements (elementType) and whether the array contains null values (containsNull).
createCombiner() - Method in class org.apache.spark.Aggregator
 
createCompiledClass(String, File, TestUtils.JavaSourceFromString, Seq<URL>) - Static method in class org.apache.spark.TestUtils
Creates a compiled class with the source file.
createCompiledClass(String, File, String, String, Seq<URL>) - Static method in class org.apache.spark.TestUtils
Creates a compiled class with the given name.
createDataFrame(RDD<A>, TypeTags.TypeTag<A>) - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: Creates a DataFrame from an RDD of Product (e.g. case classes, tuples).
createDataFrame(Seq<A>, TypeTags.TypeTag<A>) - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: Creates a DataFrame from a local Seq of Product.
createDataFrame(RDD<Row>, StructType) - Method in class org.apache.spark.sql.SparkSession
:: DeveloperApi :: Creates a DataFrame from an RDD containing Rows using the given schema.
createDataFrame(JavaRDD<Row>, StructType) - Method in class org.apache.spark.sql.SparkSession
:: DeveloperApi :: Creates a DataFrame from a JavaRDD containing Rows using the given schema.
createDataFrame(List<Row>, StructType) - Method in class org.apache.spark.sql.SparkSession
:: DeveloperApi :: Creates a DataFrame from a List containing Rows using the given schema.
createDataFrame(RDD<?>, Class<?>) - Method in class org.apache.spark.sql.SparkSession
Applies a schema to an RDD of Java Beans.
createDataFrame(JavaRDD<?>, Class<?>) - Method in class org.apache.spark.sql.SparkSession
Applies a schema to an RDD of Java Beans.
createDataFrame(List<?>, Class<?>) - Method in class org.apache.spark.sql.SparkSession
Applies a schema to a List of Java Beans.
createDataFrame(RDD<Row>, StructType, boolean) - Method in class org.apache.spark.sql.SparkSession
Creates a DataFrame from an RDD[Row].
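A minimal sketch of the RDD[Row]-plus-schema variant (assuming a SparkSession spark; the data and schema are hypothetical):
  import org.apache.spark.sql.Row
  import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
  val schema = StructType(Seq(
    StructField("name", StringType, nullable = true),
    StructField("age", IntegerType, nullable = true)))
  val rows = spark.sparkContext.parallelize(Seq(Row("Alice", 29), Row("Bob", 31)))
  val people = spark.createDataFrame(rows, schema)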
createDataFrame(RDD<A>, TypeTags.TypeTag<A>) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Creates a DataFrame from an RDD of Product (e.g. case classes, tuples).
createDataFrame(Seq<A>, TypeTags.TypeTag<A>) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Creates a DataFrame from a local Seq of Product.
createDataFrame(RDD<Row>, StructType) - Method in class org.apache.spark.sql.SQLContext
:: DeveloperApi :: Creates a DataFrame from an RDD containing Rows using the given schema.
createDataFrame(JavaRDD<Row>, StructType) - Method in class org.apache.spark.sql.SQLContext
:: DeveloperApi :: Creates a DataFrame from a JavaRDD containing Rows using the given schema.
createDataFrame(List<Row>, StructType) - Method in class org.apache.spark.sql.SQLContext
:: DeveloperApi :: Creates a DataFrame from a List containing Rows using the given schema.
createDataFrame(RDD<?>, Class<?>) - Method in class org.apache.spark.sql.SQLContext
Applies a schema to an RDD of Java Beans.
createDataFrame(JavaRDD<?>, Class<?>) - Method in class org.apache.spark.sql.SQLContext
Applies a schema to an RDD of Java Beans.
createDataFrame(List<?>, Class<?>) - Method in class org.apache.spark.sql.SQLContext
Applies a schema to a List of Java Beans.
createDataset(Seq<T>, Encoder<T>) - Method in class org.apache.spark.sql.SparkSession
 
createDataset(RDD<T>, Encoder<T>) - Method in class org.apache.spark.sql.SparkSession
 
createDataset(List<T>, Encoder<T>) - Method in class org.apache.spark.sql.SparkSession
 
createDataset(Seq<T>, Encoder<T>) - Method in class org.apache.spark.sql.SQLContext
 
createDataset(RDD<T>, Encoder<T>) - Method in class org.apache.spark.sql.SQLContext
 
createDataset(List<T>, Encoder<T>) - Method in class org.apache.spark.sql.SQLContext
 
createDecimalType(int, int) - Static method in class org.apache.spark.sql.types.DataTypes
Creates a DecimalType by specifying the precision and scale.
createDecimalType() - Static method in class org.apache.spark.sql.types.DataTypes
Creates a DecimalType with default precision and scale, which are 10 and 0.
createDF(RDD<byte[]>, StructType, SQLContext) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
createDirectory(String, String) - Static method in class org.apache.spark.util.Utils
Create a directory inside the given parent directory.
createDirectStream(StreamingContext, Map<String, String>, Map<TopicAndPartition, Object>, Function1<MessageAndMetadata<K, V>, R>, ClassTag<K>, ClassTag<V>, ClassTag<KD>, ClassTag<VD>, ClassTag<R>) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an input stream that directly pulls messages from Kafka Brokers without using any receiver.
createDirectStream(StreamingContext, Map<String, String>, Set<String>, ClassTag<K>, ClassTag<V>, ClassTag<KD>, ClassTag<VD>) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an input stream that directly pulls messages from Kafka Brokers without using any receiver.
createDirectStream(JavaStreamingContext, Class<K>, Class<V>, Class<KD>, Class<VD>, Class<R>, Map<String, String>, Map<TopicAndPartition, Long>, Function<MessageAndMetadata<K, V>, R>) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an input stream that directly pulls messages from Kafka Brokers without using any receiver.
createDirectStream(JavaStreamingContext, Class<K>, Class<V>, Class<KD>, Class<VD>, Map<String, String>, Set<String>) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an input stream that directly pulls messages from Kafka Brokers without using any receiver.
createExternalTable(String, String) - Method in class org.apache.spark.sql.catalog.Catalog
:: Experimental :: Creates an external table from the given path and returns the corresponding DataFrame.
createExternalTable(String, String, String) - Method in class org.apache.spark.sql.catalog.Catalog
:: Experimental :: Creates an external table from the given path based on a data source and returns the corresponding DataFrame.
createExternalTable(String, String, Map<String, String>) - Method in class org.apache.spark.sql.catalog.Catalog
:: Experimental :: Creates an external table from the given path based on a data source and a set of options.
createExternalTable(String, String, Map<String, String>) - Method in class org.apache.spark.sql.catalog.Catalog
:: Experimental :: (Scala-specific) Creates an external table from the given path based on a data source and a set of options.
createExternalTable(String, String, StructType, Map<String, String>) - Method in class org.apache.spark.sql.catalog.Catalog
:: Experimental :: Create an external table from the given path based on a data source, a schema and a set of options.
createExternalTable(String, String, StructType, Map<String, String>) - Method in class org.apache.spark.sql.catalog.Catalog
:: Experimental :: (Scala-specific) Create an external table from the given path based on a data source, a schema and a set of options.
createExternalTable(String, String) - Method in class org.apache.spark.sql.internal.CatalogImpl
:: Experimental :: Creates an external table from the given path and returns the corresponding DataFrame.
createExternalTable(String, String, String) - Method in class org.apache.spark.sql.internal.CatalogImpl
:: Experimental :: Creates an external table from the given path based on a data source and returns the corresponding DataFrame.
createExternalTable(String, String, Map<String, String>) - Method in class org.apache.spark.sql.internal.CatalogImpl
:: Experimental :: Creates an external table from the given path based on a data source and a set of options.
createExternalTable(String, String, Map<String, String>) - Method in class org.apache.spark.sql.internal.CatalogImpl
:: Experimental :: (Scala-specific) Creates an external table from the given path based on a data source and a set of options.
createExternalTable(String, String, StructType, Map<String, String>) - Method in class org.apache.spark.sql.internal.CatalogImpl
:: Experimental :: Create an external table from the given path based on a data source, a schema and a set of options.
createExternalTable(String, String, StructType, Map<String, String>) - Method in class org.apache.spark.sql.internal.CatalogImpl
:: Experimental :: (Scala-specific) Create an external table from the given path based on a data source, a schema and a set of options.
createExternalTable(String, String) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Creates an external table from the given path and returns the corresponding DataFrame.
createExternalTable(String, String, String) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Creates an external table from the given path based on a data source and returns the corresponding DataFrame.
createExternalTable(String, String, Map<String, String>) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Creates an external table from the given path based on a data source and a set of options.
createExternalTable(String, String, Map<String, String>) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: (Scala-specific) Creates an external table from the given path based on a data source and a set of options.
createExternalTable(String, String, StructType, Map<String, String>) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Create an external table from the given path based on a data source, a schema and a set of options.
createExternalTable(String, String, StructType, Map<String, String>) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: (Scala-specific) Create an external table from the given path based on a data source, a schema and a set of options.
createFilter(StructType, Filter[]) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
createJar(Seq<File>, File, Option<String>) - Static method in class org.apache.spark.TestUtils
Create a jar file that contains this set of files.
createJarWithClasses(Seq<String>, String, Seq<Tuple2<String, String>>, Seq<URL>) - Static method in class org.apache.spark.TestUtils
Create a jar that defines classes with the given names.
createJarWithFiles(Map<String, String>, File) - Static method in class org.apache.spark.TestUtils
Create a jar file containing multiple files.
createLogForDriver(SparkConf, String, Configuration) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
Create a WriteAheadLog for the driver.
createLogForReceiver(SparkConf, String, Configuration) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
Create a WriteAheadLog for the receiver.
createMapType(DataType, DataType) - Static method in class org.apache.spark.sql.types.DataTypes
Creates a MapType by specifying the data type of keys (keyType) and values (valueType).
createMapType(DataType, DataType, boolean) - Static method in class org.apache.spark.sql.types.DataTypes
Creates a MapType by specifying the data type of keys (keyType), the data type of values (valueType), and whether values contain any null value (valueContainsNull).
createModel(Vector, double) - Method in class org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
 
createModel(Vector, double) - Method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
createModel(Vector, double) - Method in class org.apache.spark.mllib.classification.SVMWithSGD
 
createModel(Vector, double) - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
Create a model given the weights and intercept.
createModel(Vector, double) - Method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
createModel(Vector, double) - Method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
createModel(Vector, double) - Method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
createOrReplaceTempView(String) - Method in class org.apache.spark.sql.Dataset
Creates a temporary view using the given name.
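A minimal sketch (assuming a SparkSession spark and a DataFrame people with a hypothetical "age" column):
  // Register the Dataset under a name visible to SQL in this session.
  people.createOrReplaceTempView("people")
  val adults = spark.sql("SELECT * FROM people WHERE age >= 18")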
createOutputOperationFailureForUI(String) - Static method in class org.apache.spark.streaming.ui.UIUtils
 
createPMMLModelExport(Object) - Static method in class org.apache.spark.mllib.pmml.export.PMMLModelExportFactory
Factory object that helps create the appropriate PMMLModelExport implementation for a given machine learning model (for example, KMeansModel).
createPollingStream(StreamingContext, String, int, StorageLevel) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream that is to be used with the Spark Sink deployed on a Flume agent.
createPollingStream(StreamingContext, Seq<InetSocketAddress>, StorageLevel) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream that is to be used with the Spark Sink deployed on a Flume agent.
createPollingStream(StreamingContext, Seq<InetSocketAddress>, StorageLevel, int, int) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream that is to be used with the Spark Sink deployed on a Flume agent.
createPollingStream(JavaStreamingContext, String, int) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream that is to be used with the Spark Sink deployed on a Flume agent.
createPollingStream(JavaStreamingContext, String, int, StorageLevel) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream that is to be used with the Spark Sink deployed on a Flume agent.
createPollingStream(JavaStreamingContext, InetSocketAddress[], StorageLevel) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream that is to be used with the Spark Sink deployed on a Flume agent.
createPollingStream(JavaStreamingContext, InetSocketAddress[], StorageLevel, int, int) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream that is to be used with the Spark Sink deployed on a Flume agent.
createRDD(SparkContext, Map<String, String>, OffsetRange[], ClassTag<K>, ClassTag<V>, ClassTag<KD>, ClassTag<VD>) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an RDD from Kafka using offset ranges for each topic and partition.
createRDD(SparkContext, Map<String, String>, OffsetRange[], Map<TopicAndPartition, Broker>, Function1<MessageAndMetadata<K, V>, R>, ClassTag<K>, ClassTag<V>, ClassTag<KD>, ClassTag<VD>, ClassTag<R>) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an RDD from Kafka using offset ranges for each topic and partition.
createRDD(JavaSparkContext, Class<K>, Class<V>, Class<KD>, Class<VD>, Map<String, String>, OffsetRange[]) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an RDD from Kafka using offset ranges for each topic and partition.
createRDD(JavaSparkContext, Class<K>, Class<V>, Class<KD>, Class<VD>, Class<R>, Map<String, String>, OffsetRange[], Map<TopicAndPartition, Broker>, Function<MessageAndMetadata<K, V>, R>) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an RDD from Kafka using offset ranges for each topic and partition.
createRDDFromArray(JavaSparkContext, byte[][]) - Static method in class org.apache.spark.api.r.RRDD
Create an RRDD given a sequence of byte arrays.
createRDDWithLocalProperties(Time, boolean, Function0<U>) - Method in class org.apache.spark.streaming.dstream.DStream
Wrap a body of code such that the call site and operation scope information are passed to the RDDs created in this body properly.
createRedirectHandler(String, String, Function1<HttpServletRequest, BoxedUnit>, String, Set<String>) - Static method in class org.apache.spark.ui.JettyUtils
Create a handler that always redirects the user to the given path
createRelation(SQLContext, SaveMode, Map<String, String>, Dataset<Row>) - Method in interface org.apache.spark.sql.sources.CreatableRelationProvider
Creates a relation with the given parameters based on the contents of the given DataFrame.
createRelation(SQLContext, Map<String, String>) - Method in interface org.apache.spark.sql.sources.RelationProvider
Returns a new base relation with the given parameters.
createRelation(SQLContext, Map<String, String>, StructType) - Method in interface org.apache.spark.sql.sources.SchemaRelationProvider
Returns a new base relation with the given parameters and user defined schema.
createServlet(JettyUtils.ServletParams<T>, org.apache.spark.SecurityManager, SparkConf, Function1<T, Object>) - Static method in class org.apache.spark.ui.JettyUtils
 
createServletHandler(String, JettyUtils.ServletParams<T>, org.apache.spark.SecurityManager, SparkConf, String, Function1<T, Object>) - Static method in class org.apache.spark.ui.JettyUtils
Create a context handler that responds to a request with the given path prefix
createServletHandler(String, HttpServlet, String) - Static method in class org.apache.spark.ui.JettyUtils
Create a context handler that responds to a request with the given path prefix
createSink(SQLContext, Map<String, String>, Seq<String>) - Method in interface org.apache.spark.sql.sources.StreamSinkProvider
 
createSource(SQLContext, String, Option<StructType>, String, Map<String, String>) - Method in interface org.apache.spark.sql.sources.StreamSourceProvider
 
createSparkContext(String, String, String, String[], Map<Object, Object>, Map<Object, Object>) - Static method in class org.apache.spark.api.r.RRDD
 
createSQLContext(JavaSparkContext) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
createStaticHandler(String, String) - Static method in class org.apache.spark.ui.JettyUtils
Create a handler for serving files from a static directory
createStream(StreamingContext, String, int, StorageLevel) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Create an input stream from a Flume source.
createStream(StreamingContext, String, int, StorageLevel, boolean) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Create an input stream from a Flume source.
createStream(JavaStreamingContext, String, int) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream from a Flume source.
createStream(JavaStreamingContext, String, int, StorageLevel) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream from a Flume source.
createStream(JavaStreamingContext, String, int, StorageLevel, boolean) - Static method in class org.apache.spark.streaming.flume.FlumeUtils
Creates an input stream from a Flume source.
createStream(StreamingContext, String, String, Map<String, Object>, StorageLevel) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an input stream that pulls messages from Kafka Brokers.
createStream(StreamingContext, Map<String, String>, Map<String, Object>, StorageLevel, ClassTag<K>, ClassTag<V>, ClassTag<U>, ClassTag<T>) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an input stream that pulls messages from Kafka Brokers.
createStream(JavaStreamingContext, String, String, Map<String, Integer>) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an input stream that pulls messages from Kafka Brokers.
createStream(JavaStreamingContext, String, String, Map<String, Integer>, StorageLevel) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an input stream that pulls messages from Kafka Brokers.
createStream(JavaStreamingContext, Class<K>, Class<V>, Class<U>, Class<T>, Map<String, String>, Map<String, Integer>, StorageLevel) - Static method in class org.apache.spark.streaming.kafka.KafkaUtils
Create an input stream that pulls messages from Kafka Brokers.
createStream(StreamingContext, String, String, String, String, InitialPositionInStream, Duration, StorageLevel, Function1<Record, T>, ClassTag<T>) - Static method in class org.apache.spark.streaming.kinesis.KinesisUtils
Create an input stream that pulls messages from a Kinesis stream.
createStream(StreamingContext, String, String, String, String, InitialPositionInStream, Duration, StorageLevel, Function1<Record, T>, String, String, ClassTag<T>) - Static method in class org.apache.spark.streaming.kinesis.KinesisUtils
Create an input stream that pulls messages from a Kinesis stream.
createStream(StreamingContext, String, String, String, String, InitialPositionInStream, Duration, StorageLevel) - Static method in class org.apache.spark.streaming.kinesis.KinesisUtils
Create an input stream that pulls messages from a Kinesis stream.
createStream(StreamingContext, String, String, String, String, InitialPositionInStream, Duration, StorageLevel, String, String) - Static method in class org.apache.spark.streaming.kinesis.KinesisUtils
Create an input stream that pulls messages from a Kinesis stream.
createStream(JavaStreamingContext, String, String, String, String, InitialPositionInStream, Duration, StorageLevel, Function<Record, T>, Class<T>) - Static method in class org.apache.spark.streaming.kinesis.KinesisUtils
Create an input stream that pulls messages from a Kinesis stream.
createStream(JavaStreamingContext, String, String, String, String, InitialPositionInStream, Duration, StorageLevel, Function<Record, T>, Class<T>, String, String) - Static method in class org.apache.spark.streaming.kinesis.KinesisUtils
Create an input stream that pulls messages from a Kinesis stream.
createStream(JavaStreamingContext, String, String, String, String, InitialPositionInStream, Duration, StorageLevel) - Static method in class org.apache.spark.streaming.kinesis.KinesisUtils
Create an input stream that pulls messages from a Kinesis stream.
createStream(JavaStreamingContext, String, String, String, String, InitialPositionInStream, Duration, StorageLevel, String, String) - Static method in class org.apache.spark.streaming.kinesis.KinesisUtils
Create an input stream that pulls messages from a Kinesis stream.
createStream(JavaStreamingContext, String, String, String, String, int, Duration, StorageLevel, String, String) - Method in class org.apache.spark.streaming.kinesis.KinesisUtilsPythonHelper
 
createStructField(String, String, boolean) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
createStructField(String, DataType, boolean, Metadata) - Static method in class org.apache.spark.sql.types.DataTypes
Creates a StructField by specifying the name (name), data type (dataType) and whether values of this field can be null values (nullable).
createStructField(String, DataType, boolean) - Static method in class org.apache.spark.sql.types.DataTypes
Creates a StructField with empty metadata.
createStructType(Seq<StructField>) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
createStructType(List<StructField>) - Static method in class org.apache.spark.sql.types.DataTypes
Creates a StructType with the given list of StructFields (fields).
createStructType(StructField[]) - Static method in class org.apache.spark.sql.types.DataTypes
Creates a StructType with the given StructField array (fields).
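A minimal sketch of building a schema with these factory methods (the field names are hypothetical):
  import org.apache.spark.sql.types.DataTypes
  val schema = DataTypes.createStructType(Array(
    DataTypes.createStructField("id", DataTypes.IntegerType, false),
    DataTypes.createStructField("tags", DataTypes.createArrayType(DataTypes.StringType), true),
    DataTypes.createStructField("props",
      DataTypes.createMapType(DataTypes.StringType, DataTypes.StringType), true)))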
createTempDir(String, String) - Static method in class org.apache.spark.util.Utils
Create a temporary directory inside the given parent directory.
createTempView(String) - Method in class org.apache.spark.sql.Dataset
Creates a temporary view using the given name.
createTempView(String, Dataset<Row>, boolean) - Method in class org.apache.spark.sql.SparkSession
Creates a temporary view with a DataFrame.
createTransformFunc() - Method in class org.apache.spark.ml.feature.DCT
 
createTransformFunc() - Method in class org.apache.spark.ml.feature.ElementwiseProduct
 
createTransformFunc() - Method in class org.apache.spark.ml.feature.NGram
 
createTransformFunc() - Method in class org.apache.spark.ml.feature.Normalizer
 
createTransformFunc() - Method in class org.apache.spark.ml.feature.PolynomialExpansion
 
createTransformFunc() - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
createTransformFunc() - Method in class org.apache.spark.ml.feature.Tokenizer
 
createTransformFunc() - Method in class org.apache.spark.ml.UnaryTransformer
Creates the transform function using the given param map.
createUnsafe(long, int, int) - Static method in class org.apache.spark.sql.types.Decimal
Creates a decimal from an unscaled value, precision, and scale without checking the bounds.
createWorkspace(int) - Static method in class org.apache.spark.mllib.optimization.NNLS
 
crosstab(String, String) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Computes a pair-wise frequency table of the given columns.
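A minimal sketch (assuming a DataFrame df with hypothetical columns "browser" and "os"):
  // One row per distinct "browser" value, one column per distinct "os" value;
  // each cell holds the pair-wise frequency.
  df.stat.crosstab("browser", "os").show()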
CrossValidator - Class in org.apache.spark.ml.tuning
:: Experimental :: K-fold cross validation.
CrossValidator(String) - Constructor for class org.apache.spark.ml.tuning.CrossValidator
 
CrossValidator() - Constructor for class org.apache.spark.ml.tuning.CrossValidator
 
CrossValidatorModel - Class in org.apache.spark.ml.tuning
:: Experimental :: Model from k-fold cross validation.
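A minimal k-fold cross-validation sketch for CrossValidator and the resulting CrossValidatorModel (assuming an existing Pipeline pipeline and a training DataFrame training, both hypothetical):
  import org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
  import org.apache.spark.ml.tuning.{CrossValidator, ParamGridBuilder}
  val paramGrid = new ParamGridBuilder().build()   // add real params to tune here
  val cv = new CrossValidator()
    .setEstimator(pipeline)
    .setEvaluator(new BinaryClassificationEvaluator())
    .setEstimatorParamMaps(paramGrid)
    .setNumFolds(3)
  val cvModel = cv.fit(training)   // CrossValidatorModel wrapping the best model found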
csv(String...) - Method in class org.apache.spark.sql.DataFrameReader
Loads a CSV file and returns the result as a DataFrame.
csv(Seq<String>) - Method in class org.apache.spark.sql.DataFrameReader
Loads a CSV file and returns the result as a DataFrame.
csv(String) - Method in class org.apache.spark.sql.DataFrameWriter
Saves the content of the DataFrame in CSV format at the specified path.
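A minimal read/write sketch (assuming a SparkSession spark; the paths are hypothetical):
  val df = spark.read
    .option("header", "true")        // first line contains column names
    .option("inferSchema", "true")
    .csv("data/input.csv")
  df.write.option("header", "true").csv("data/output")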
cube(Column...) - Method in class org.apache.spark.sql.Dataset
Create a multi-dimensional cube for the current Dataset using the specified columns, so we can run aggregation on them.
cube(String, String...) - Method in class org.apache.spark.sql.Dataset
Create a multi-dimensional cube for the current Dataset using the specified columns, so we can run aggregation on them.
cube(Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Create a multi-dimensional cube for the current Dataset using the specified columns, so we can run aggregation on them.
cube(String, Seq<String>) - Method in class org.apache.spark.sql.Dataset
Create a multi-dimensional cube for the current Dataset using the specified columns, so we can run aggregation on them.
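A minimal sketch (assuming a DataFrame sales with hypothetical columns "city", "year" and "amount"):
  import org.apache.spark.sql.functions.sum
  // Aggregates over every combination of the grouping columns,
  // including the grand total (both columns null).
  sales.cube("city", "year").agg(sum("amount")).show()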
cume_dist() - Static method in class org.apache.spark.sql.functions
Window function: returns the cumulative distribution of values within a window partition, i.e. the fraction of rows that are below the current row.
current_date() - Static method in class org.apache.spark.sql.functions
Returns the current date as a date column.
current_timestamp() - Static method in class org.apache.spark.sql.functions
Returns the current timestamp as a timestamp column.
currentAttemptId() - Method in interface org.apache.spark.SparkStageInfo
 
currentAttemptId() - Method in class org.apache.spark.SparkStageInfoImpl
 
currentDatabase() - Method in class org.apache.spark.sql.catalog.Catalog
Returns the current default database in this session.
currentDatabase() - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns the current default database in this session.

D

dapply(Dataset<Row>, byte[], byte[], Object[], StructType) - Static method in class org.apache.spark.sql.api.r.SQLUtils
The helper function for dapply() on R side.
data() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.LaunchTask
 
data() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StatusUpdate
 
Database - Class in org.apache.spark.sql.catalog
 
Database(String, String, String) - Constructor for class org.apache.spark.sql.catalog.Database
 
database() - Method in class org.apache.spark.sql.catalog.Table
 
databaseTypeDefinition() - Method in class org.apache.spark.sql.jdbc.JdbcType
 
dataDistribution() - Method in class org.apache.spark.status.api.v1.RDDStorageInfo
 
DataFrameNaFunctions - Class in org.apache.spark.sql
:: Experimental :: Functionality for working with missing data in DataFrames.
DataFrameReader - Class in org.apache.spark.sql
Interface used to load a Dataset from external storage systems (e.g. file systems, key-value stores, etc).
DataFrameStatFunctions - Class in org.apache.spark.sql
:: Experimental :: Statistic functions for DataFrames.
DataFrameWriter - Class in org.apache.spark.sql
Interface used to write a Dataset to external storage systems (e.g. file systems, key-value stores, etc).
Dataset<T> - Class in org.apache.spark.sql
A Dataset is a strongly typed collection of domain-specific objects that can be transformed in parallel using functional or relational operations.
Dataset(SparkSession, LogicalPlan, Encoder<T>) - Constructor for class org.apache.spark.sql.Dataset
 
Dataset(SQLContext, LogicalPlan, Encoder<T>) - Constructor for class org.apache.spark.sql.Dataset
 
DatasetHolder<T> - Class in org.apache.spark.sql
A container for a Dataset, used for implicit conversions in Scala.
DataSourceRegister - Interface in org.apache.spark.sql.sources
:: DeveloperApi :: Data sources should implement this trait so that they can register an alias to their data source.
dataTablesHeaderNodes() - Static method in class org.apache.spark.ui.UIUtils
 
dataType() - Method in class org.apache.spark.sql.catalog.Column
 
dataType() - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
The DataType of the returned value of this UserDefinedAggregateFunction.
dataType() - Method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
DataType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The base type of all Spark SQL data types.
DataType() - Constructor for class org.apache.spark.sql.types.DataType
 
dataType() - Method in class org.apache.spark.sql.types.StructField
 
DataTypes - Class in org.apache.spark.sql.types
To get/create a specific data type, users should use singleton objects and factory methods provided by this class.
DataTypes() - Constructor for class org.apache.spark.sql.types.DataTypes
 
DataValidators - Class in org.apache.spark.mllib.util
:: DeveloperApi :: A collection of methods used to validate data before applying ML algorithms.
DataValidators() - Constructor for class org.apache.spark.mllib.util.DataValidators
 
date() - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type date.
DATE() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable date type.
date_add(Column, int) - Static method in class org.apache.spark.sql.functions
Returns the date that is the given number of days after start.
date_format(Column, String) - Static method in class org.apache.spark.sql.functions
Converts a date/timestamp/string to a value of string in the format specified by the date format given by the second argument.
date_sub(Column, int) - Static method in class org.apache.spark.sql.functions
Returns the date that is the given number of days before start.
datediff(Column, Column) - Static method in class org.apache.spark.sql.functions
Returns the number of days from start to end.
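A minimal sketch of a few of these date functions (assuming a DataFrame df with hypothetical date columns "start" and "end"):
  import org.apache.spark.sql.functions.{current_date, date_add, date_format, date_sub, datediff}
  df.select(
    date_add(df("start"), 7),                  // 7 days after "start"
    date_sub(df("start"), 7),                  // 7 days before "start"
    datediff(df("end"), df("start")),          // days between the two
    date_format(df("start"), "yyyy-MM-dd"),
    current_date()).show()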
DateType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the DateType object.
DateType - Class in org.apache.spark.sql.types
:: DeveloperApi :: A date type, supporting "0001-01-01" through "9999-12-31".
dayofmonth(Column) - Static method in class org.apache.spark.sql.functions
Extracts the day of the month as an integer from a given date/timestamp/string.
dayofyear(Column) - Static method in class org.apache.spark.sql.functions
Extracts the day of the year as an integer from a given date/timestamp/string.
DB2Dialect - Class in org.apache.spark.sql.jdbc
 
DB2Dialect() - Constructor for class org.apache.spark.sql.jdbc.DB2Dialect
 
DCT - Class in org.apache.spark.ml.feature
:: Experimental :: A feature transformer that takes the 1D discrete cosine transform of a real vector.
DCT(String) - Constructor for class org.apache.spark.ml.feature.DCT
 
DCT() - Constructor for class org.apache.spark.ml.feature.DCT
 
deadStorageStatusList() - Method in class org.apache.spark.storage.StorageStatusListener
 
deadStorageStatusList() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
decayFactor() - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
decimal() - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type decimal.
decimal(int, int) - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type decimal.
DECIMAL() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable decimal type.
Decimal - Class in org.apache.spark.sql.types
A mutable implementation of BigDecimal that can hold a Long if values are small enough.
Decimal() - Constructor for class org.apache.spark.sql.types.Decimal
 
Decimal.DecimalAsIfIntegral$ - Class in org.apache.spark.sql.types
An Integral evidence parameter for Decimals.
Decimal.DecimalAsIfIntegral$() - Constructor for class org.apache.spark.sql.types.Decimal.DecimalAsIfIntegral$
 
Decimal.DecimalIsFractional$ - Class in org.apache.spark.sql.types
A Fractional evidence parameter for Decimals.
Decimal.DecimalIsFractional$() - Constructor for class org.apache.spark.sql.types.Decimal.DecimalIsFractional$
 
DecimalType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing java.math.BigDecimal values.
DecimalType(int, int) - Constructor for class org.apache.spark.sql.types.DecimalType
 
DecimalType(int) - Constructor for class org.apache.spark.sql.types.DecimalType
 
DecimalType() - Constructor for class org.apache.spark.sql.types.DecimalType
 
DecimalType.Expression$ - Class in org.apache.spark.sql.types
 
DecimalType.Expression$() - Constructor for class org.apache.spark.sql.types.DecimalType.Expression$
 
DecimalType.Fixed$ - Class in org.apache.spark.sql.types
 
DecimalType.Fixed$() - Constructor for class org.apache.spark.sql.types.DecimalType.Fixed$
 
DecisionTree - Class in org.apache.spark.mllib.tree
A class which implements a decision tree learning algorithm for classification and regression.
DecisionTree(Strategy) - Constructor for class org.apache.spark.mllib.tree.DecisionTree
 
DecisionTreeClassificationModel - Class in org.apache.spark.ml.classification
:: Experimental :: Decision tree model for classification.
DecisionTreeClassifier - Class in org.apache.spark.ml.classification
:: Experimental :: Decision tree learning algorithm for classification.
DecisionTreeClassifier(String) - Constructor for class org.apache.spark.ml.classification.DecisionTreeClassifier
 
DecisionTreeClassifier() - Constructor for class org.apache.spark.ml.classification.DecisionTreeClassifier
 
DecisionTreeModel - Class in org.apache.spark.mllib.tree.model
Decision tree model for classification or regression.
DecisionTreeModel(Node, Enumeration.Value) - Constructor for class org.apache.spark.mllib.tree.model.DecisionTreeModel
 
DecisionTreeModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.tree.model
 
DecisionTreeModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$
 
DecisionTreeModel.SaveLoadV1_0$.NodeData - Class in org.apache.spark.mllib.tree.model
Model data for model import/export
DecisionTreeModel.SaveLoadV1_0$.NodeData(int, int, org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0.PredictData, double, boolean, Option<org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0.SplitData>, Option<Object>, Option<Object>, Option<Object>) - Constructor for class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
DecisionTreeModel.SaveLoadV1_0$.PredictData - Class in org.apache.spark.mllib.tree.model
 
DecisionTreeModel.SaveLoadV1_0$.PredictData(double, double) - Constructor for class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.PredictData
 
DecisionTreeModel.SaveLoadV1_0$.SplitData - Class in org.apache.spark.mllib.tree.model
 
DecisionTreeModel.SaveLoadV1_0$.SplitData(int, double, int, Seq<Object>) - Constructor for class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.SplitData
 
DecisionTreeModelReadWrite - Class in org.apache.spark.ml.tree
Helper classes for tree model persistence
DecisionTreeModelReadWrite() - Constructor for class org.apache.spark.ml.tree.DecisionTreeModelReadWrite
 
DecisionTreeModelReadWrite.NodeData - Class in org.apache.spark.ml.tree
Info for a Node
DecisionTreeModelReadWrite.NodeData(int, double, double, double[], double, int, int, DecisionTreeModelReadWrite.SplitData) - Constructor for class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData
 
DecisionTreeModelReadWrite.NodeData$ - Class in org.apache.spark.ml.tree
 
DecisionTreeModelReadWrite.NodeData$() - Constructor for class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData$
 
DecisionTreeModelReadWrite.SplitData - Class in org.apache.spark.ml.tree
Info for a Split
DecisionTreeModelReadWrite.SplitData(int, double[], int) - Constructor for class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.SplitData
 
DecisionTreeModelReadWrite.SplitData$ - Class in org.apache.spark.ml.tree
 
DecisionTreeModelReadWrite.SplitData$() - Constructor for class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.SplitData$
 
DecisionTreeRegressionModel - Class in org.apache.spark.ml.regression
:: Experimental :: Decision tree model for regression.
DecisionTreeRegressor - Class in org.apache.spark.ml.regression
:: Experimental :: Decision tree learning algorithm for regression.
DecisionTreeRegressor(String) - Constructor for class org.apache.spark.ml.regression.DecisionTreeRegressor
 
DecisionTreeRegressor() - Constructor for class org.apache.spark.ml.regression.DecisionTreeRegressor
 
decode(Column, String) - Static method in class org.apache.spark.sql.functions
Decodes the first argument from binary into a string using the provided character set (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16').
decodeFileNameInURI(URI) - Static method in class org.apache.spark.util.Utils
Get the file name from uri's raw path and decode it.
decodeLabel(Vector) - Static method in class org.apache.spark.ml.classification.LabelConverter
Converts a vector to a label.
decodeURLParameter(String) - Static method in class org.apache.spark.ui.UIUtils
Decode URLParameter if URL is encoded by YARN-WebAppProxyServlet.
DEFAULT_DRIVER_MEM_MB() - Static method in class org.apache.spark.util.Utils
Define a default value for driver memory here since this value is referenced across the code base and nearly all files already use Utils.scala
DEFAULT_MAX_FAILURES() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
DEFAULT_ROLLING_INTERVAL_SECS() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
DEFAULT_SHUTDOWN_PRIORITY() - Static method in class org.apache.spark.util.ShutdownHookManager
 
defaultAttr() - Static method in class org.apache.spark.ml.attribute.BinaryAttribute
The default binary attribute.
defaultAttr() - Static method in class org.apache.spark.ml.attribute.NominalAttribute
The default nominal attribute.
defaultAttr() - Static method in class org.apache.spark.ml.attribute.NumericAttribute
The default numeric attribute.
defaultClassLoader() - Static method in class org.apache.spark.serializer.KryoSerializer
 
defaultClassLoader() - Method in class org.apache.spark.serializer.Serializer
Default ClassLoader to use in deserialization.
defaultClassLoader_$eq(Option<ClassLoader>) - Static method in class org.apache.spark.serializer.KryoSerializer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.clustering.KMeans
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.clustering.LDA
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.Binarizer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.DCT
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.HashingTF
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.IDF
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.IDFModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.IndexToString
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.Interaction
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.NGram
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.Normalizer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.PCA
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.PCAModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.RFormula
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
defaultCopy(ParamMap) - Method in interface org.apache.spark.ml.param.Params
Default implementation of copy with extra params.
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.Pipeline
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.PipelineModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.recommendation.ALS
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
defaultCopy(ParamMap) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
defaultCorrName() - Static method in class org.apache.spark.mllib.stat.correlation.CorrelationNames
 
defaultLink() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Binomial$
 
defaultLink() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gamma$
 
defaultLink() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gaussian$
 
defaultLink() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Poisson$
 
defaultMinPartitions() - Method in class org.apache.spark.api.java.JavaSparkContext
Default min number of partitions for Hadoop RDDs when not given by user
defaultMinPartitions() - Method in class org.apache.spark.SparkContext
Default min number of partitions for Hadoop RDDs when not given by user. Notice that we use math.min, so the "defaultMinPartitions" cannot be higher than 2.
defaultParallelism() - Method in class org.apache.spark.api.java.JavaSparkContext
Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD).
defaultParallelism() - Method in class org.apache.spark.SparkContext
Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD).
defaultParamMap() - Method in interface org.apache.spark.ml.param.Params
Internal param map for default values.
defaultParams(String) - Static method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
Returns default configuration for the boosting algorithm
defaultParams(Enumeration.Value) - Static method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
Returns default configuration for the boosting algorithm
DefaultPartitionCoalescer - Class in org.apache.spark.rdd
Coalesce the partitions of a parent RDD (prev) into fewer partitions, so that each partition of this RDD computes one or more of the parent ones.
DefaultPartitionCoalescer(double) - Constructor for class org.apache.spark.rdd.DefaultPartitionCoalescer
 
DefaultPartitionCoalescer.PartitionLocations - Class in org.apache.spark.rdd
 
DefaultPartitionCoalescer.PartitionLocations(RDD<?>) - Constructor for class org.apache.spark.rdd.DefaultPartitionCoalescer.PartitionLocations
 
defaultPartitioner(RDD<?>, Seq<RDD<?>>) - Static method in class org.apache.spark.Partitioner
Choose a partitioner to use for a cogroup-like operation between a number of RDDs.
defaultSize() - Method in class org.apache.spark.sql.types.ArrayType
The default size of a value of the ArrayType is 100 * the default size of the element type.
defaultSize() - Method in class org.apache.spark.sql.types.BinaryType
The default size of a value of the BinaryType is 100 bytes.
defaultSize() - Method in class org.apache.spark.sql.types.BooleanType
The default size of a value of the BooleanType is 1 byte.
defaultSize() - Method in class org.apache.spark.sql.types.ByteType
The default size of a value of the ByteType is 1 byte.
defaultSize() - Method in class org.apache.spark.sql.types.CalendarIntervalType
 
defaultSize() - Method in class org.apache.spark.sql.types.DataType
The default size of a value of this data type, used internally for size estimation.
defaultSize() - Method in class org.apache.spark.sql.types.DateType
The default size of a value of the DateType is 4 bytes.
defaultSize() - Method in class org.apache.spark.sql.types.DecimalType
The default size of a value of the DecimalType is 8 bytes (precision <= 18) or 16 bytes.
defaultSize() - Method in class org.apache.spark.sql.types.DoubleType
The default size of a value of the DoubleType is 8 bytes.
defaultSize() - Method in class org.apache.spark.sql.types.FloatType
The default size of a value of the FloatType is 4 bytes.
defaultSize() - Method in class org.apache.spark.sql.types.IntegerType
The default size of a value of the IntegerType is 4 bytes.
defaultSize() - Method in class org.apache.spark.sql.types.LongType
The default size of a value of the LongType is 8 bytes.
defaultSize() - Method in class org.apache.spark.sql.types.MapType
The default size of a value of the MapType is 100 * (the default size of the key type + the default size of the value type).
defaultSize() - Method in class org.apache.spark.sql.types.NullType
 
defaultSize() - Static method in class org.apache.spark.sql.types.NumericType
 
defaultSize() - Method in class org.apache.spark.sql.types.ShortType
The default size of a value of the ShortType is 2 bytes.
defaultSize() - Method in class org.apache.spark.sql.types.StringType
The default size of a value of the StringType is 20 bytes.
defaultSize() - Method in class org.apache.spark.sql.types.StructType
The default size of a value of the StructType is the total default sizes of all field types.
defaultSize() - Method in class org.apache.spark.sql.types.TimestampType
The default size of a value of the TimestampType is 8 bytes.
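A worked example of the size arithmetic above (a sketch only; the field names are invented, and the per-type sizes are the ones listed in the corresponding defaultSize entries):

    import org.apache.spark.sql.types._

    // Hypothetical three-field schema, used only to illustrate the arithmetic.
    val schema = StructType(Seq(
      StructField("id", IntegerType),     // 4 bytes
      StructField("name", StringType),    // 20 bytes
      StructField("score", DoubleType)))  // 8 bytes

    schema.defaultSize                    // 4 + 20 + 8 = 32 bytes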
DefaultSource - Class in org.apache.spark.ml.source.libsvm
The libsvm package implements the Spark SQL data source API for loading LIBSVM data as a DataFrame.
DefaultSource() - Constructor for class org.apache.spark.ml.source.libsvm.DefaultSource
 
defaultStrategy(String) - Static method in class org.apache.spark.mllib.tree.configuration.Strategy
Construct a default set of parameters for DecisionTree
defaultStrategy(Enumeration.Value) - Static method in class org.apache.spark.mllib.tree.configuration.Strategy
Construct a default set of parameters for DecisionTree
defaultValue() - Method in class org.apache.spark.internal.config.ConfigEntryWithDefault
 
defaultValueString() - Method in class org.apache.spark.internal.config.ConfigEntryWithDefault
 
defaultValueString() - Method in class org.apache.spark.internal.config.FallbackConfigEntry
 
degree() - Method in class org.apache.spark.ml.feature.PolynomialExpansion
The polynomial degree to expand, which should be >= 1.
degrees() - Method in class org.apache.spark.graphx.GraphOps
The degree of each vertex in the graph.
degreesOfFreedom() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
Degrees of freedom.
degreesOfFreedom() - Method in class org.apache.spark.mllib.stat.test.ChiSqTestResult
 
degreesOfFreedom() - Method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTestResult
 
degreesOfFreedom() - Method in interface org.apache.spark.mllib.stat.test.TestResult
Returns the degree(s) of freedom of the hypothesis test.
delegate() - Method in class org.apache.spark.InterruptibleIterator
 
deleteCheckpointFiles() - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
Remove any remaining checkpoint files from training.
deleteRecursively(File) - Static method in class org.apache.spark.util.Utils
Delete a file or directory and its contents recursively.
dense(int, int, double[]) - Static method in class org.apache.spark.ml.linalg.Matrices
Creates a column-major dense matrix.
dense(double, double...) - Static method in class org.apache.spark.ml.linalg.Vectors
Creates a dense vector from its values.
dense(double, Seq<Object>) - Static method in class org.apache.spark.ml.linalg.Vectors
Creates a dense vector from its values.
dense(double[]) - Static method in class org.apache.spark.ml.linalg.Vectors
Creates a dense vector from a double array.
dense(int, int, double[]) - Static method in class org.apache.spark.mllib.linalg.Matrices
Creates a column-major dense matrix.
dense(double, double...) - Static method in class org.apache.spark.mllib.linalg.Vectors
Creates a dense vector from its values.
dense(double, Seq<Object>) - Static method in class org.apache.spark.mllib.linalg.Vectors
Creates a dense vector from its values.
dense(double[]) - Static method in class org.apache.spark.mllib.linalg.Vectors
Creates a dense vector from a double array.
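A minimal usage sketch of the dense factory methods listed above; the values are arbitrary:

    import org.apache.spark.ml.linalg.{Matrices, Vectors}

    val v = Vectors.dense(1.0, 2.0, 3.0)                     // dense vector from values
    val a = Vectors.dense(Array(0.5, 1.5))                   // dense vector from a double array
    val m = Matrices.dense(2, 2, Array(1.0, 2.0, 3.0, 4.0))  // 2 x 2 column-major matrix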
dense_rank() - Static method in class org.apache.spark.sql.functions
Window function: returns the rank of rows within a window partition, without any gaps.
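A brief sketch of dense_rank over a window; df and the columns "dept" and "salary" are hypothetical and stand in for any DataFrame with such columns:

    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions.dense_rank

    val w = Window.partitionBy("dept").orderBy("salary")
    val ranked = df.withColumn("rank", dense_rank().over(w))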
DenseMatrix - Class in org.apache.spark.ml.linalg
Column-major dense matrix.
DenseMatrix(int, int, double[], boolean) - Constructor for class org.apache.spark.ml.linalg.DenseMatrix
 
DenseMatrix(int, int, double[]) - Constructor for class org.apache.spark.ml.linalg.DenseMatrix
Column-major dense matrix.
DenseMatrix - Class in org.apache.spark.mllib.linalg
Column-major dense matrix.
DenseMatrix(int, int, double[], boolean) - Constructor for class org.apache.spark.mllib.linalg.DenseMatrix
 
DenseMatrix(int, int, double[]) - Constructor for class org.apache.spark.mllib.linalg.DenseMatrix
Column-major dense matrix.
DenseVector - Class in org.apache.spark.ml.linalg
A dense vector represented by a value array.
DenseVector(double[]) - Constructor for class org.apache.spark.ml.linalg.DenseVector
 
DenseVector - Class in org.apache.spark.mllib.linalg
A dense vector represented by a value array.
DenseVector(double[]) - Constructor for class org.apache.spark.mllib.linalg.DenseVector
 
dependencies() - Static method in class org.apache.spark.api.r.RRDD
 
dependencies() - Static method in class org.apache.spark.graphx.EdgeRDD
 
dependencies() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
dependencies() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
dependencies() - Static method in class org.apache.spark.graphx.VertexRDD
 
dependencies() - Static method in class org.apache.spark.rdd.HadoopRDD
 
dependencies() - Static method in class org.apache.spark.rdd.JdbcRDD
 
dependencies() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
dependencies() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
dependencies() - Method in class org.apache.spark.rdd.RDD
Get the list of dependencies of this RDD, taking into account whether the RDD is checkpointed or not.
dependencies() - Method in class org.apache.spark.streaming.dstream.DStream
List of parent DStreams on which this DStream depends.
dependencies() - Method in class org.apache.spark.streaming.dstream.InputDStream
 
Dependency<T> - Class in org.apache.spark
:: DeveloperApi :: Base class for dependencies.
Dependency() - Constructor for class org.apache.spark.Dependency
 
DEPLOY_MODE - Static variable in class org.apache.spark.launcher.SparkLauncher
The Spark deploy mode.
deployMode() - Method in class org.apache.spark.SparkContext
 
depth() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
depth() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
depth() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
Get depth of tree.
depth() - Method in class org.apache.spark.util.sketch.CountMinSketch
Depth of this CountMinSketch.
DerbyDialect - Class in org.apache.spark.sql.jdbc
 
DerbyDialect() - Constructor for class org.apache.spark.sql.jdbc.DerbyDialect
 
deriv(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.CLogLog$
 
deriv(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Identity$
 
deriv(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Inverse$
 
deriv(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Log$
 
deriv(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Logit$
 
deriv(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Probit$
 
deriv(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Sqrt$
 
desc() - Method in class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods
 
desc() - Method in class org.apache.spark.sql.Column
Returns an ordering used in sorting.
desc(String) - Static method in class org.apache.spark.sql.functions
Returns a sort expression based on the descending order of the column.
desc() - Method in class org.apache.spark.util.MethodIdentifier
 
describe(String...) - Method in class org.apache.spark.sql.Dataset
Computes statistics for numeric columns, including count, mean, stddev, min, and max.
describe(Seq<String>) - Method in class org.apache.spark.sql.Dataset
Computes statistics for numeric columns, including count, mean, stddev, min, and max.
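A small, self-contained sketch of describe; the toy data is invented for illustration:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().master("local[*]").appName("describe-example").getOrCreate()
    import spark.implicits._

    val people = Seq(("alice", 25), ("bob", 31), ("carol", 40)).toDF("name", "age")
    people.describe("age").show()   // count, mean, stddev, min, max of "age"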
describeTopics(int) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
describeTopics() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
describeTopics(int) - Method in class org.apache.spark.ml.clustering.LDAModel
Return the topics described by their top-weighted terms.
describeTopics() - Method in class org.apache.spark.ml.clustering.LDAModel
 
describeTopics(int) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
describeTopics() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
describeTopics(int) - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
 
describeTopics(int) - Method in class org.apache.spark.mllib.clustering.LDAModel
Return the topics described by weighted terms.
describeTopics() - Method in class org.apache.spark.mllib.clustering.LDAModel
Return the topics described by weighted terms.
describeTopics(int) - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
description() - Method in class org.apache.spark.ExceptionFailure
 
description() - Method in class org.apache.spark.sql.catalog.Column
 
description() - Method in class org.apache.spark.sql.catalog.Database
 
description() - Method in class org.apache.spark.sql.catalog.Function
 
description() - Method in class org.apache.spark.sql.catalog.Table
 
description() - Method in class org.apache.spark.sql.SinkStatus
 
description() - Method in class org.apache.spark.sql.SourceStatus
 
description() - Method in class org.apache.spark.status.api.v1.JobData
 
description() - Method in class org.apache.spark.storage.StorageLevel
 
description() - Method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
description() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
DeserializationStream - Class in org.apache.spark.serializer
:: DeveloperApi :: A stream for reading serialized objects.
DeserializationStream() - Constructor for class org.apache.spark.serializer.DeserializationStream
 
deserialize(Object) - Method in class org.apache.spark.mllib.linalg.VectorUDT
 
deserialize(ByteBuffer, ClassLoader, ClassTag<T>) - Method in class org.apache.spark.serializer.DummySerializerInstance
 
deserialize(ByteBuffer, ClassTag<T>) - Method in class org.apache.spark.serializer.DummySerializerInstance
 
deserialize(ByteBuffer, ClassTag<T>) - Method in class org.apache.spark.serializer.SerializerInstance
 
deserialize(ByteBuffer, ClassLoader, ClassTag<T>) - Method in class org.apache.spark.serializer.SerializerInstance
 
deserialize(byte[]) - Static method in class org.apache.spark.util.Utils
Deserialize an object using Java serialization
deserialize(byte[], ClassLoader) - Static method in class org.apache.spark.util.Utils
Deserialize an object using Java serialization and the given ClassLoader
deserialized() - Method in class org.apache.spark.storage.StorageLevel
 
DeserializedMemoryEntry<T> - Class in org.apache.spark.storage.memory
 
DeserializedMemoryEntry(Object, long, ClassTag<T>) - Constructor for class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
deserializeLongValue(byte[]) - Static method in class org.apache.spark.util.Utils
Deserialize a Long value (used for PythonPartitioner)
deserializeStream(InputStream) - Method in class org.apache.spark.serializer.DummySerializerInstance
 
deserializeStream(InputStream) - Method in class org.apache.spark.serializer.SerializerInstance
 
deserializeViaNestedStream(InputStream, SerializerInstance, Function1<DeserializationStream, BoxedUnit>) - Static method in class org.apache.spark.util.Utils
Deserialize via nested stream using specific serializer
destroy() - Method in class org.apache.spark.broadcast.Broadcast
Destroy all data and metadata related to this broadcast variable.
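A usage sketch of destroy on a broadcast variable, assuming sc is an existing SparkContext:

    val lookup = sc.broadcast(Map("a" -> 1, "b" -> 2))
    val counts = sc.parallelize(Seq("a", "b", "a")).map(lookup.value(_)).collect()
    lookup.destroy()   // release the broadcast data and metadata on the driver and executors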
details() - Method in class org.apache.spark.scheduler.StageInfo
 
details() - Method in class org.apache.spark.status.api.v1.StageData
 
determineBounds(ArrayBuffer<Tuple2<K, Object>>, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.RangePartitioner
Determines the bounds for range partitioning from candidates with weights indicating how many items each represents.
deterministic() - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
Returns true iff this function is deterministic, i.e. given the same input, it always returns the same output.
deviance(double, double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Binomial$
 
deviance(double, double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gamma$
 
deviance(double, double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gaussian$
 
deviance(double, double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Poisson$
 
deviance() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
The deviance for the fitted model.
devianceResiduals() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
The weighted residuals, the usual residuals rescaled by the square root of the instance weights.
dfToCols(Dataset<Row>) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
dfToRowRDD(Dataset<Row>) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
dgemm(double, DenseMatrix<Object>, DenseMatrix<Object>, double, DenseMatrix<Object>) - Static method in class org.apache.spark.ml.ann.BreezeUtil
DGEMM: C := alpha * A * B + beta * C
dgemv(double, DenseMatrix<Object>, DenseVector<Object>, double, DenseVector<Object>) - Static method in class org.apache.spark.ml.ann.BreezeUtil
DGEMV: y := alpha * A * x + beta * y
diag(Vector) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
Generate a diagonal matrix in DenseMatrix format from the supplied values.
diag(Vector) - Static method in class org.apache.spark.ml.linalg.Matrices
Generate a diagonal matrix in Matrix format from the supplied values.
diag(Vector) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
Generate a diagonal matrix in DenseMatrix format from the supplied values.
diag(Vector) - Static method in class org.apache.spark.mllib.linalg.Matrices
Generate a diagonal matrix in Matrix format from the supplied values.
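A one-line sketch of the diag factory above; the diagonal values are arbitrary:

    import org.apache.spark.ml.linalg.{Matrices, Vectors}

    // 3 x 3 matrix with (1, 2, 3) on the diagonal and zeros elsewhere.
    val d = Matrices.diag(Vectors.dense(1.0, 2.0, 3.0))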
diff(RDD<Tuple2<Object, VD>>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
diff(VertexRDD<VD>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
diff(RDD<Tuple2<Object, VD>>) - Method in class org.apache.spark.graphx.VertexRDD
For each vertex present in both this and other, diff returns only those vertices with differing values; for values that are different, keeps the values from other.
diff(VertexRDD<VD>) - Method in class org.apache.spark.graphx.VertexRDD
For each vertex present in both this and other, diff returns only those vertices with differing values; for values that are different, keeps the values from other.
diff(GenSeq<B>) - Static method in class org.apache.spark.sql.types.StructType
 
dir() - Method in class org.apache.spark.mllib.optimization.NNLS.Workspace
 
disableOutputSpecValidation() - Static method in class org.apache.spark.rdd.PairRDDFunctions
Allows for the spark.hadoop.validateOutputSpecs checks to be disabled on a case-by-case basis; see SPARK-4835 for more details.
disconnect() - Method in interface org.apache.spark.launcher.SparkAppHandle
Disconnects the handle from the application, without stopping it.
DISK_BYTES_SPILLED() - Static method in class org.apache.spark.InternalAccumulator
 
DISK_ONLY - Static variable in class org.apache.spark.api.java.StorageLevels
 
DISK_ONLY() - Static method in class org.apache.spark.storage.StorageLevel
 
DISK_ONLY_2 - Static variable in class org.apache.spark.api.java.StorageLevels
 
DISK_ONLY_2() - Static method in class org.apache.spark.storage.StorageLevel
 
diskBytesSpilled() - Method in class org.apache.spark.status.api.v1.ExecutorStageSummary
 
diskBytesSpilled() - Method in class org.apache.spark.status.api.v1.StageData
 
diskBytesSpilled() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
diskBytesSpilled() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
diskBytesSpilled() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
diskBytesSpilled() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
diskSize() - Method in class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
 
diskSize() - Method in class org.apache.spark.storage.BlockStatus
 
diskSize() - Method in class org.apache.spark.storage.BlockUpdatedInfo
 
diskSize() - Method in class org.apache.spark.storage.RDDInfo
 
diskUsed() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
diskUsed() - Method in class org.apache.spark.status.api.v1.RDDDataDistribution
 
diskUsed() - Method in class org.apache.spark.status.api.v1.RDDPartitionInfo
 
diskUsed() - Method in class org.apache.spark.status.api.v1.RDDStorageInfo
 
diskUsed() - Method in class org.apache.spark.storage.StorageStatus
Return the disk space used by this block manager.
diskUsedByRdd(int) - Method in class org.apache.spark.storage.StorageStatus
Return the disk space used by the given RDD in this block manager in O(1) time.
dispersion() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
The dispersion of the fitted model.
dispose(ByteBuffer) - Static method in class org.apache.spark.storage.StorageUtils
Attempt to clean up a ByteBuffer if it is memory-mapped.
distinct() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return a new RDD containing the distinct elements in this RDD.
distinct(int) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return a new RDD containing the distinct elements in this RDD.
distinct() - Method in class org.apache.spark.api.java.JavaPairRDD
Return a new RDD containing the distinct elements in this RDD.
distinct(int) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a new RDD containing the distinct elements in this RDD.
distinct() - Method in class org.apache.spark.api.java.JavaRDD
Return a new RDD containing the distinct elements in this RDD.
distinct(int) - Method in class org.apache.spark.api.java.JavaRDD
Return a new RDD containing the distinct elements in this RDD.
distinct(int, Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
distinct() - Static method in class org.apache.spark.api.r.RRDD
 
distinct(int, Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
distinct() - Static method in class org.apache.spark.graphx.EdgeRDD
 
distinct(int, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
distinct() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
distinct(int, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
distinct() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
distinct(int, Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
distinct() - Static method in class org.apache.spark.graphx.VertexRDD
 
distinct(int, Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
distinct() - Static method in class org.apache.spark.rdd.HadoopRDD
 
distinct(int, Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
distinct() - Static method in class org.apache.spark.rdd.JdbcRDD
 
distinct(int, Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
distinct() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
distinct(int, Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
distinct() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
distinct(int, Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Return a new RDD containing the distinct elements in this RDD.
distinct() - Method in class org.apache.spark.rdd.RDD
Return a new RDD containing the distinct elements in this RDD.
distinct() - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset that contains only the unique rows from this Dataset.
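A short sketch of distinct on an RDD and on a Dataset, assuming spark is an existing SparkSession:

    import spark.implicits._

    val rdd = spark.sparkContext.parallelize(Seq(1, 1, 2, 3, 3))
    rdd.distinct().collect()   // Array(1, 2, 3), in some order

    val ds = Seq(1, 1, 2, 3, 3).toDS()
    ds.distinct().show()       // one row per unique value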
distinct(Column...) - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
Creates a Column for this UDAF using the distinct values of the given Columns as input arguments.
distinct(Seq<Column>) - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
Creates a Column for this UDAF using the distinct values of the given Columns as input arguments.
distinct() - Static method in class org.apache.spark.sql.types.StructType
 
distinct$default$2(int) - Static method in class org.apache.spark.api.r.RRDD
 
distinct$default$2(int) - Static method in class org.apache.spark.graphx.EdgeRDD
 
distinct$default$2(int) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
distinct$default$2(int) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
distinct$default$2(int) - Static method in class org.apache.spark.graphx.VertexRDD
 
distinct$default$2(int) - Static method in class org.apache.spark.rdd.HadoopRDD
 
distinct$default$2(int) - Static method in class org.apache.spark.rdd.JdbcRDD
 
distinct$default$2(int) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
distinct$default$2(int) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
DistributedLDAModel - Class in org.apache.spark.ml.clustering
:: Experimental ::
DistributedLDAModel - Class in org.apache.spark.mllib.clustering
Distributed LDA model.
DistributedMatrix - Interface in org.apache.spark.mllib.linalg.distributed
Represents a distributively stored matrix backed by one or more RDDs.
div(Decimal, Decimal) - Method in class org.apache.spark.sql.types.Decimal.DecimalIsFractional$
 
div(Duration) - Method in class org.apache.spark.streaming.Duration
 
divide(Object) - Method in class org.apache.spark.sql.Column
Divides this expression by another expression.
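A brief sketch of Column.divide; df and the column names "total" and "count" are hypothetical:

    import org.apache.spark.sql.functions.col

    val avg = df.select(col("total").divide(col("count")).as("average"))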
doc() - Static method in class org.apache.spark.ml.param.DoubleParam
 
doc() - Static method in class org.apache.spark.ml.param.FloatParam
 
doc() - Method in class org.apache.spark.ml.param.Param
 
docConcentration() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
docConcentration() - Static method in class org.apache.spark.ml.clustering.LDA
 
docConcentration() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
docConcentration() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
 
docConcentration() - Method in class org.apache.spark.mllib.clustering.LDAModel
Concentration parameter (commonly named "alpha") for the prior placed on documents' distributions over topics ("theta").
docConcentration() - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
doDestroy(boolean) - Method in class org.apache.spark.broadcast.Broadcast
Actually destroy all data and metadata related to this broadcast variable.
doesDirectoryContainAnyNewFiles(File, long) - Static method in class org.apache.spark.util.Utils
Determines if a directory contains any files newer than cutoff seconds.
doExecute() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
doExecuteBroadcast() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
doPrepare() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
Dot - Class in org.apache.spark.ml.feature
 
Dot() - Constructor for class org.apache.spark.ml.feature.Dot
 
dot(Vector, Vector) - Static method in class org.apache.spark.ml.linalg.BLAS
dot(x, y)
dot(Vector, Vector) - Static method in class org.apache.spark.mllib.linalg.BLAS
dot(x, y)
doTest(DStream<Tuple2<StatCounter, StatCounter>>) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
doTest(DStream<Tuple2<StatCounter, StatCounter>>) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
DOUBLE() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable double type.
doubleAccumulator(double) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator double variable, which tasks can "add" values to using the add method.
doubleAccumulator(double, String) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator double variable, which tasks can "add" values to using the add method.
doubleAccumulator() - Method in class org.apache.spark.SparkContext
Create and register a double accumulator, which starts with 0 and accumulates inputs by +=.
doubleAccumulator(String) - Method in class org.apache.spark.SparkContext
Create and register a double accumulator, which starts with 0 and accumulates inputs by +=.
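A usage sketch of a named double accumulator, assuming sc is an existing SparkContext:

    val acc = sc.doubleAccumulator("sumOfValues")
    sc.parallelize(Seq(1.5, 2.5, 3.0)).foreach(x => acc.add(x))
    acc.value   // 7.0 once the action has finished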
DoubleAccumulator - Class in org.apache.spark.util
An accumulator for computing sum, count, and average of double-precision floating-point numbers.
DoubleAccumulator() - Constructor for class org.apache.spark.util.DoubleAccumulator
 
DoubleArrayParam - Class in org.apache.spark.ml.param
:: DeveloperApi :: Specialized version of Param[Array[Double]] for Java.
DoubleArrayParam(Params, String, String, Function1<double[], Object>) - Constructor for class org.apache.spark.ml.param.DoubleArrayParam
 
DoubleArrayParam(Params, String, String) - Constructor for class org.apache.spark.ml.param.DoubleArrayParam
 
DoubleFlatMapFunction<T> - Interface in org.apache.spark.api.java.function
A function that returns zero or more records of type Double from each input record.
DoubleFunction<T> - Interface in org.apache.spark.api.java.function
A function that returns Doubles, and can be used to construct DoubleRDDs.
DoubleParam - Class in org.apache.spark.ml.param
:: DeveloperApi :: Specialized version of Param[Double] for Java.
DoubleParam(String, String, String, Function1<Object, Object>) - Constructor for class org.apache.spark.ml.param.DoubleParam
 
DoubleParam(String, String, String) - Constructor for class org.apache.spark.ml.param.DoubleParam
 
DoubleParam(Identifiable, String, String, Function1<Object, Object>) - Constructor for class org.apache.spark.ml.param.DoubleParam
 
DoubleParam(Identifiable, String, String) - Constructor for class org.apache.spark.ml.param.DoubleParam
 
DoubleRDDFunctions - Class in org.apache.spark.rdd
Extra functions available on RDDs of Doubles through an implicit conversion.
DoubleRDDFunctions(RDD<Object>) - Constructor for class org.apache.spark.rdd.DoubleRDDFunctions
 
doubleRDDToDoubleRDDFunctions(RDD<Object>) - Static method in class org.apache.spark.rdd.RDD
 
DoubleType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the DoubleType object.
DoubleType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing Double values.
doUnpersist(boolean) - Method in class org.apache.spark.broadcast.Broadcast
Actually unpersist the broadcasted value on the executors.
driver() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.SetupDriver
 
DRIVER_EXTRA_CLASSPATH - Static variable in class org.apache.spark.launcher.SparkLauncher
Configuration key for the driver class path.
DRIVER_EXTRA_JAVA_OPTIONS - Static variable in class org.apache.spark.launcher.SparkLauncher
Configuration key for the driver VM options.
DRIVER_EXTRA_LIBRARY_PATH - Static variable in class org.apache.spark.launcher.SparkLauncher
Configuration key for the driver native library path.
DRIVER_MEMORY - Static variable in class org.apache.spark.launcher.SparkLauncher
Configuration key for the driver memory.
DRIVER_WAL_BATCHING_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
DRIVER_WAL_BATCHING_TIMEOUT_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
DRIVER_WAL_CLASS_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
DRIVER_WAL_CLOSE_AFTER_WRITE_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
DRIVER_WAL_MAX_FAILURES_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
DRIVER_WAL_ROLLING_INTERVAL_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
driverLogs() - Method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
drop() - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that drops rows containing any null or NaN values.
drop(String) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that drops rows containing null or NaN values.
drop(String[]) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that drops rows containing any null or NaN values in the specified columns.
drop(Seq<String>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
(Scala-specific) Returns a new DataFrame that drops rows containing any null or NaN values in the specified columns.
drop(String, String[]) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that drops rows containing null or NaN values in the specified columns.
drop(String, Seq<String>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
(Scala-specific) Returns a new DataFrame that drops rows containing null or NaN values in the specified columns.
drop(int) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that drops rows containing less than minNonNulls non-null and non-NaN values.
drop(int, String[]) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that drops rows containing less than minNonNulls non-null and non-NaN values in the specified columns.
drop(int, Seq<String>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
(Scala-specific) Returns a new DataFrame that drops rows containing less than minNonNulls non-null and non-NaN values in the specified columns.
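A short sketch of the na.drop variants above; df is a hypothetical DataFrame that may contain null or NaN values, and the column names are invented:

    df.na.drop()                        // drop rows with any null or NaN value
    df.na.drop(Array("age", "height"))  // consider only the listed columns
    df.na.drop(2)                       // keep rows with at least 2 non-null, non-NaN values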
drop(String...) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with columns dropped.
drop(String) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with a column dropped.
drop(Seq<String>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with columns dropped.
drop(Column) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with a column dropped.
drop(int) - Static method in class org.apache.spark.sql.types.StructType
 
dropDuplicates() - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset that contains only the unique rows from this Dataset.
dropDuplicates(Seq<String>) - Method in class org.apache.spark.sql.Dataset
(Scala-specific) Returns a new Dataset with duplicate rows removed, considering only the subset of columns.
dropDuplicates(String[]) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with duplicate rows removed, considering only the subset of columns.
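A brief sketch of drop and dropDuplicates on a Dataset; df and the column names are hypothetical:

    df.drop("tempCol")                       // remove a single column
    df.dropDuplicates()                      // equivalent to distinct()
    df.dropDuplicates(Seq("userId", "day"))  // deduplicate on a subset of columns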
dropLast() - Method in class org.apache.spark.ml.feature.OneHotEncoder
Whether to drop the last category in the encoded vector (default: true)
dropRight(int) - Static method in class org.apache.spark.sql.types.StructType
 
dropTempTable(String) - Method in class org.apache.spark.sql.SQLContext
Drops the temporary table with the given table name in the catalog.
dropTempView(String) - Method in class org.apache.spark.sql.catalog.Catalog
Drops the temporary view with the given view name in the catalog.
dropTempView(String) - Method in class org.apache.spark.sql.internal.CatalogImpl
Drops the temporary view with the given view name in the catalog.
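A usage sketch of dropTempView, assuming spark is an existing SparkSession and df a hypothetical DataFrame:

    df.createOrReplaceTempView("people_tmp")
    spark.sql("SELECT count(*) FROM people_tmp").show()
    spark.catalog.dropTempView("people_tmp")   // remove the temporary view again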
dropWhile(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
Dst - Static variable in class org.apache.spark.graphx.TripletFields
Expose the destination and edge fields but not the source field.
dstAttr() - Method in class org.apache.spark.graphx.EdgeContext
The vertex attribute of the edge's destination vertex.
dstAttr() - Method in class org.apache.spark.graphx.EdgeTriplet
The destination vertex attribute
dstAttr() - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
dstId() - Method in class org.apache.spark.graphx.Edge
 
dstId() - Method in class org.apache.spark.graphx.EdgeContext
The vertex id of the edge's destination vertex.
dstId() - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
dstream() - Method in class org.apache.spark.streaming.api.java.JavaDStream
 
dstream() - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
 
dstream() - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
dstream() - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
dstream() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
dstream() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
dstream() - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
DStream<T> - Class in org.apache.spark.streaming.dstream
A Discretized Stream (DStream), the basic abstraction in Spark Streaming, is a continuous sequence of RDDs (of the same type) representing a continuous stream of data (see org.apache.spark.rdd.RDD in the Spark core documentation for more details on RDDs).
DStream(StreamingContext, ClassTag<T>) - Constructor for class org.apache.spark.streaming.dstream.DStream
 
dtypes() - Method in class org.apache.spark.sql.Dataset
Returns all column names and their data types as an array.
DummySerializerInstance - Class in org.apache.spark.serializer
Unfortunately, we need a serializer instance in order to construct a DiskBlockObjectWriter.
duration() - Method in class org.apache.spark.scheduler.TaskInfo
 
duration() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
Duration - Class in org.apache.spark.streaming
 
Duration(long) - Constructor for class org.apache.spark.streaming.Duration
 
duration() - Method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
Return the duration of this output operation.
Durations - Class in org.apache.spark.streaming
 
Durations() - Constructor for class org.apache.spark.streaming.Durations
 

E

Edge<ED> - Class in org.apache.spark.graphx
A single directed edge consisting of a source id, target id, and the data associated with the edge.
Edge(long, long, ED) - Constructor for class org.apache.spark.graphx.Edge
 
EdgeActiveness - Enum in org.apache.spark.graphx.impl
Criteria for filtering edges based on activeness.
EdgeContext<VD,ED,A> - Class in org.apache.spark.graphx
Represents an edge along with its neighboring vertices and allows sending messages along the edge.
EdgeContext() - Constructor for class org.apache.spark.graphx.EdgeContext
 
EdgeDirection - Class in org.apache.spark.graphx
The direction of a directed edge relative to a vertex.
edgeListFile(SparkContext, String, boolean, int, StorageLevel, StorageLevel) - Static method in class org.apache.spark.graphx.GraphLoader
Loads a graph from an edge list formatted file where each line contains two integers: a source id and a target id.
EdgeOnly - Static variable in class org.apache.spark.graphx.TripletFields
Expose only the edge field and not the source or destination field.
EdgeRDD<ED> - Class in org.apache.spark.graphx
EdgeRDD[ED, VD] extends RDD[Edge[ED]] by storing the edges in columnar format on each partition for performance.
EdgeRDD(SparkContext, Seq<Dependency<?>>) - Constructor for class org.apache.spark.graphx.EdgeRDD
 
EdgeRDDImpl<ED,VD> - Class in org.apache.spark.graphx.impl
 
edges() - Method in class org.apache.spark.graphx.Graph
An RDD containing the edges and their associated attributes.
edges() - Method in class org.apache.spark.graphx.impl.GraphImpl
 
EdgeTriplet<VD,ED> - Class in org.apache.spark.graphx
An edge triplet represents an edge along with the vertex attributes of its neighboring vertices.
EdgeTriplet() - Constructor for class org.apache.spark.graphx.EdgeTriplet
 
EigenValueDecomposition - Class in org.apache.spark.mllib.linalg
Compute eigen-decomposition.
EigenValueDecomposition() - Constructor for class org.apache.spark.mllib.linalg.EigenValueDecomposition
 
Either() - Static method in class org.apache.spark.graphx.EdgeDirection
Edges originating from *or* arriving at a vertex of interest.
elasticNetParam() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
elasticNetParam() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
elasticNetParam() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
elasticNetParam() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
elem(String, Function1<Object, Object>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
elem(Parsers) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
elementType() - Method in class org.apache.spark.sql.types.ArrayType
 
ElementwiseProduct - Class in org.apache.spark.ml.feature
:: Experimental :: Outputs the Hadamard product (i.e., the element-wise product) of each input vector with a provided "weight" vector.
ElementwiseProduct(String) - Constructor for class org.apache.spark.ml.feature.ElementwiseProduct
 
ElementwiseProduct() - Constructor for class org.apache.spark.ml.feature.ElementwiseProduct
 
ElementwiseProduct - Class in org.apache.spark.mllib.feature
Outputs the Hadamard product (i.e., the element-wise product) of each input vector with a provided "weight" vector.
ElementwiseProduct(Vector) - Constructor for class org.apache.spark.mllib.feature.ElementwiseProduct
 
EMLDAOptimizer - Class in org.apache.spark.mllib.clustering
:: DeveloperApi ::
EMLDAOptimizer() - Constructor for class org.apache.spark.mllib.clustering.EMLDAOptimizer
 
empty() - Static method in class org.apache.spark.api.java.Optional
 
empty() - Static method in class org.apache.spark.ml.param.ParamMap
Returns an empty param map.
empty() - Method in class org.apache.spark.mllib.fpm.PrefixSpan.Prefix$
An empty Prefix instance.
empty() - Static method in class org.apache.spark.sql.types.Metadata
Returns an empty Metadata.
empty() - Static method in class org.apache.spark.storage.BlockStatus
 
EMPTY_USER_GROUPS() - Static method in class org.apache.spark.util.Utils
 
emptyDataFrame() - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: Returns a DataFrame with no rows or columns.
emptyDataFrame() - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Returns a DataFrame with no rows or columns.
emptyJson() - Static method in class org.apache.spark.util.Utils
Return an empty JSON object
emptyNode(int) - Static method in class org.apache.spark.mllib.tree.model.Node
Return a node with the given node id (but nothing else set).
emptyRDD() - Method in class org.apache.spark.api.java.JavaSparkContext
Get an RDD that has no partitions or elements.
emptyRDD(ClassTag<T>) - Method in class org.apache.spark.SparkContext
Get an RDD that has no partitions or elements.
enableHiveSupport() - Method in class org.apache.spark.sql.SparkSession.Builder
Enables Hive support, including connectivity to a persistent Hive metastore, support for Hive serdes, and Hive user-defined functions.
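A minimal sketch of building a Hive-enabled session; the application name is arbitrary:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("hive-enabled-example")
      .enableHiveSupport()
      .getOrCreate()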
enableReceiverLog(SparkConf) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
encode(Column, String) - Static method in class org.apache.spark.sql.functions
Converts the first argument from a string into a binary using the provided character set (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16').
encodeFileNameToURIRawPath(String) - Static method in class org.apache.spark.util.Utils
A file name may contain some invalid URI characters, such as " ".
encodeLabeledPoint(LabeledPoint, int) - Static method in class org.apache.spark.ml.classification.LabelConverter
Encodes a label as a vector.
Encoder<T> - Interface in org.apache.spark.sql
:: Experimental :: Used to convert a JVM object of type T to and from the internal Spark SQL representation.
Encoders - Class in org.apache.spark.sql
:: Experimental :: Methods for creating an Encoder.
Encoders() - Constructor for class org.apache.spark.sql.Encoders
 
endOffset() - Method in exception org.apache.spark.sql.ContinuousQueryException
 
endsWith(Column) - Method in class org.apache.spark.sql.Column
String ends with.
endsWith(String) - Method in class org.apache.spark.sql.Column
String ends with another string literal.
endsWith(GenSeq<B>) - Static method in class org.apache.spark.sql.types.StructType
 
endTime() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
endTime() - Method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
endTime() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
EnsembleCombiningStrategy - Class in org.apache.spark.mllib.tree.configuration
Enum to select ensemble combining strategy for base learners
EnsembleCombiningStrategy() - Constructor for class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
EnsembleModelReadWrite - Class in org.apache.spark.ml.tree
 
EnsembleModelReadWrite() - Constructor for class org.apache.spark.ml.tree.EnsembleModelReadWrite
 
EnsembleModelReadWrite.EnsembleNodeData - Class in org.apache.spark.ml.tree
Info for one Node in a tree ensemble
EnsembleModelReadWrite.EnsembleNodeData(int, DecisionTreeModelReadWrite.NodeData) - Constructor for class org.apache.spark.ml.tree.EnsembleModelReadWrite.EnsembleNodeData
 
EnsembleModelReadWrite.EnsembleNodeData$ - Class in org.apache.spark.ml.tree
 
EnsembleModelReadWrite.EnsembleNodeData$() - Constructor for class org.apache.spark.ml.tree.EnsembleModelReadWrite.EnsembleNodeData$
 
entries() - Method in class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
 
Entropy - Class in org.apache.spark.mllib.tree.impurity
:: Experimental :: Class for calculating entropy during binary classification.
Entropy() - Constructor for class org.apache.spark.mllib.tree.impurity.Entropy
 
entrySet() - Method in class org.apache.spark.api.java.JavaUtils.SerializableMapWrapper
 
EnumUtil - Class in org.apache.spark.util
 
EnumUtil() - Constructor for class org.apache.spark.util.EnumUtil
 
environmentDetails() - Method in class org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
 
EnvironmentListener - Class in org.apache.spark.ui.env
:: DeveloperApi :: A SparkListener that prepares information to be displayed on the EnvironmentTab
EnvironmentListener() - Constructor for class org.apache.spark.ui.env.EnvironmentListener
 
environmentUpdateFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
environmentUpdateToJson(SparkListenerEnvironmentUpdate) - Static method in class org.apache.spark.util.JsonProtocol
 
EPSILON() - Static method in class org.apache.spark.ml.impl.Utils
 
eqNullSafe(Object) - Method in class org.apache.spark.sql.Column
Equality test that is safe for null values.
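A short sketch of null-safe equality on a Column; df and the column "a" are hypothetical:

    import org.apache.spark.sql.functions.{col, lit}

    df.filter(col("a").eqNullSafe(lit(null)))  // true exactly when a IS NULL
    df.filter(col("a").eqNullSafe(5))          // true when a = 5, never null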
EqualNullSafe - Class in org.apache.spark.sql.sources
Performs equality comparison, similar to EqualTo.
EqualNullSafe(String, Object) - Constructor for class org.apache.spark.sql.sources.EqualNullSafe
 
equals(Object) - Static method in class org.apache.spark.Aggregator
 
equals(Object) - Method in class org.apache.spark.api.java.Optional
 
equals(Object) - Static method in class org.apache.spark.CleanAccum
 
equals(Object) - Static method in class org.apache.spark.CleanBroadcast
 
equals(Object) - Static method in class org.apache.spark.CleanCheckpoint
 
equals(Object) - Static method in class org.apache.spark.CleanRDD
 
equals(Object) - Static method in class org.apache.spark.CleanShuffle
 
equals(Object) - Static method in class org.apache.spark.ExceptionFailure
 
equals(Object) - Static method in class org.apache.spark.ExecutorLostFailure
 
equals(Object) - Static method in class org.apache.spark.ExecutorRegistered
 
equals(Object) - Static method in class org.apache.spark.ExecutorRemoved
 
equals(Object) - Static method in class org.apache.spark.ExpireDeadHosts
 
equals(Object) - Static method in class org.apache.spark.FetchFailed
 
equals(Object) - Static method in class org.apache.spark.graphx.Edge
 
equals(Object) - Method in class org.apache.spark.graphx.EdgeDirection
 
equals(Object) - Method in class org.apache.spark.HashPartitioner
 
equals(Object) - Method in class org.apache.spark.ml.attribute.AttributeGroup
 
equals(Object) - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
equals(Object) - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
equals(Object) - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
equals(Object) - Static method in class org.apache.spark.ml.feature.Dot
 
equals(Object) - Static method in class org.apache.spark.ml.feature.LabeledPoint
 
equals(Object) - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
equals(Object) - Method in class org.apache.spark.ml.linalg.DenseVector
 
equals(Object) - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
equals(Object) - Method in class org.apache.spark.ml.linalg.SparseVector
 
equals(Object) - Method in interface org.apache.spark.ml.linalg.Vector
 
equals(Object) - Static method in class org.apache.spark.ml.param.DoubleParam
 
equals(Object) - Static method in class org.apache.spark.ml.param.FloatParam
 
equals(Object) - Method in class org.apache.spark.ml.param.Param
 
equals(Object) - Static method in class org.apache.spark.ml.param.ParamPair
 
equals(Object) - Method in class org.apache.spark.ml.tree.CategoricalSplit
 
equals(Object) - Method in class org.apache.spark.ml.tree.ContinuousSplit
 
equals(Object) - Static method in class org.apache.spark.mllib.feature.VocabWord
 
equals(Object) - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
equals(Object) - Method in class org.apache.spark.mllib.linalg.DenseVector
 
equals(Object) - Static method in class org.apache.spark.mllib.linalg.distributed.IndexedRow
 
equals(Object) - Static method in class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
equals(Object) - Static method in class org.apache.spark.mllib.linalg.QRDecomposition
 
equals(Object) - Static method in class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
equals(Object) - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
equals(Object) - Method in class org.apache.spark.mllib.linalg.SparseVector
 
equals(Object) - Method in interface org.apache.spark.mllib.linalg.Vector
 
equals(Object) - Method in class org.apache.spark.mllib.linalg.VectorUDT
 
equals(Object) - Static method in class org.apache.spark.mllib.recommendation.Rating
 
equals(Object) - Static method in class org.apache.spark.mllib.regression.LabeledPoint
 
equals(Object) - Static method in class org.apache.spark.mllib.stat.test.BinarySample
 
equals(Object) - Static method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
equals(Object) - Method in class org.apache.spark.mllib.tree.model.InformationGainStats
 
equals(Object) - Method in class org.apache.spark.mllib.tree.model.Predict
 
equals(Object) - Static method in class org.apache.spark.mllib.tree.model.Split
 
equals(Object) - Method in class org.apache.spark.partial.BoundedDouble
Note that, consistent with Double, any NaN value will make equality false.
equals(Object) - Method in interface org.apache.spark.Partition
 
equals(Object) - Method in class org.apache.spark.RangePartitioner
 
equals(Object) - Static method in class org.apache.spark.Resubmitted
 
equals(Object) - Static method in class org.apache.spark.rpc.netty.OnStart
 
equals(Object) - Static method in class org.apache.spark.rpc.netty.OnStop
 
equals(Object) - Static method in class org.apache.spark.scheduler.AccumulableInfo
 
equals(Object) - Static method in class org.apache.spark.scheduler.AllJobsCancelled
 
equals(Object) - Static method in class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
equals(Object) - Method in class org.apache.spark.scheduler.cluster.ExecutorInfo
 
equals(Object) - Method in class org.apache.spark.scheduler.InputFormatInfo
 
equals(Object) - Static method in class org.apache.spark.scheduler.JobSucceeded
 
equals(Object) - Static method in class org.apache.spark.scheduler.local.KillTask
 
equals(Object) - Static method in class org.apache.spark.scheduler.local.ReviveOffers
 
equals(Object) - Static method in class org.apache.spark.scheduler.local.StatusUpdate
 
equals(Object) - Static method in class org.apache.spark.scheduler.local.StopExecutor
 
equals(Object) - Static method in class org.apache.spark.scheduler.ResubmitFailedStages
 
equals(Object) - Static method in class org.apache.spark.scheduler.RuntimePercentage
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerApplicationEnd
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerBlockUpdated
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerJobStart
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerStageCompleted
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerTaskGettingResult
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
equals(Object) - Static method in class org.apache.spark.scheduler.SparkListenerUnpersistRDD
 
equals(Object) - Method in class org.apache.spark.scheduler.SplitInfo
 
equals(Object) - Static method in class org.apache.spark.scheduler.StopCoordinator
 
equals(Object) - Method in class org.apache.spark.sql.Column
 
equals(Object) - Static method in class org.apache.spark.sql.DatasetHolder
 
equals(Object) - Static method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
equals(Object) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
equals(Object) - Static method in class org.apache.spark.sql.internal.HiveSerDe
 
equals(Object) - Static method in class org.apache.spark.sql.jdbc.JdbcType
 
equals(Object) - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
equals(Object) - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
equals(Object) - Static method in class org.apache.spark.sql.ProcessingTime
 
equals(Object) - Method in interface org.apache.spark.sql.Row
 
equals(Object) - Static method in class org.apache.spark.sql.sources.And
 
equals(Object) - Static method in class org.apache.spark.sql.sources.EqualNullSafe
 
equals(Object) - Static method in class org.apache.spark.sql.sources.EqualTo
 
equals(Object) - Static method in class org.apache.spark.sql.sources.GreaterThan
 
equals(Object) - Static method in class org.apache.spark.sql.sources.GreaterThanOrEqual
 
equals(Object) - Method in class org.apache.spark.sql.sources.In
 
equals(Object) - Static method in class org.apache.spark.sql.sources.IsNotNull
 
equals(Object) - Static method in class org.apache.spark.sql.sources.IsNull
 
equals(Object) - Static method in class org.apache.spark.sql.sources.LessThan
 
equals(Object) - Static method in class org.apache.spark.sql.sources.LessThanOrEqual
 
equals(Object) - Static method in class org.apache.spark.sql.sources.Not
 
equals(Object) - Static method in class org.apache.spark.sql.sources.Or
 
equals(Object) - Static method in class org.apache.spark.sql.sources.StringContains
 
equals(Object) - Static method in class org.apache.spark.sql.sources.StringEndsWith
 
equals(Object) - Static method in class org.apache.spark.sql.sources.StringStartsWith
 
equals(Object) - Static method in class org.apache.spark.sql.types.ArrayType
 
equals(Object) - Method in class org.apache.spark.sql.types.Decimal
 
equals(Object) - Static method in class org.apache.spark.sql.types.DecimalType
 
equals(Object) - Static method in class org.apache.spark.sql.types.MapType
 
equals(Object) - Method in class org.apache.spark.sql.types.Metadata
 
equals(Object) - Static method in class org.apache.spark.sql.types.StructField
 
equals(Object) - Method in class org.apache.spark.sql.types.StructType
 
equals(Object) - Static method in class org.apache.spark.StopMapOutputTracker
 
equals(Object) - Method in class org.apache.spark.storage.BlockId
 
equals(Object) - Method in class org.apache.spark.storage.BlockManagerId
 
equals(Object) - Static method in class org.apache.spark.storage.BlockStatus
 
equals(Object) - Static method in class org.apache.spark.storage.BlockUpdatedInfo
 
equals(Object) - Static method in class org.apache.spark.storage.BroadcastBlockId
 
equals(Object) - Static method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
equals(Object) - Static method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
equals(Object) - Static method in class org.apache.spark.storage.RDDBlockId
 
equals(Object) - Static method in class org.apache.spark.storage.ShuffleBlockId
 
equals(Object) - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
equals(Object) - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
equals(Object) - Method in class org.apache.spark.storage.StorageLevel
 
equals(Object) - Static method in class org.apache.spark.storage.StreamBlockId
 
equals(Object) - Static method in class org.apache.spark.storage.TaskResultBlockId
 
equals(Object) - Static method in class org.apache.spark.streaming.Duration
 
equals(Object) - Method in class org.apache.spark.streaming.kafka.Broker
 
equals(Object) - Method in class org.apache.spark.streaming.kafka.OffsetRange
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.AllReceiverIds
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.BatchInfo
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.GetAllReceiverInfo
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StopAllReceivers
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchCompleted
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchStarted
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationCompleted
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationStarted
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverError
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStarted
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStopped
 
equals(Object) - Static method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
equals(Object) - Static method in class org.apache.spark.streaming.Time
 
equals(Object) - Static method in class org.apache.spark.Success
 
equals(Object) - Static method in class org.apache.spark.TaskCommitDenied
 
equals(Object) - Static method in class org.apache.spark.TaskKilled
 
equals(Object) - Static method in class org.apache.spark.TaskResultLost
 
equals(Object) - Static method in class org.apache.spark.TaskSchedulerIsSet
 
equals(Object) - Static method in class org.apache.spark.UnknownReason
 
equals(Object) - Static method in class org.apache.spark.util.MethodIdentifier
 
equals(Object) - Static method in class org.apache.spark.util.MutablePair
 
equalTo(Object) - Method in class org.apache.spark.sql.Column
Equality test.
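A minimal sketch of equalTo, the Java-friendly counterpart of ===; the DataFrame and the column values below are made-up assumptions, not part of the API docs:

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.col

    // Minimal sketch: keep only rows whose "name" column equals "alice" (hypothetical data).
    def equalToSketch(spark: SparkSession): Unit = {
      import spark.implicits._
      val df = Seq(("alice", 1), ("bob", 2)).toDF("name", "id")
      df.filter(col("name").equalTo("alice")).show()   // same as df.filter($"name" === "alice")
    }
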
EqualTo - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to a value equal to value.
EqualTo(String, Object) - Constructor for class org.apache.spark.sql.sources.EqualTo
 
err(String) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
errorMessage() - Method in class org.apache.spark.status.api.v1.TaskData
 
errorMessage() - Method in class org.apache.spark.ui.jobs.UIData.TaskUIData
 
estimate(double[]) - Method in class org.apache.spark.mllib.stat.KernelDensity
Estimates probability density function at the given array of points.
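As a rough illustration (the sample values and bandwidth are made up), estimate could be used like this:

    import org.apache.spark.SparkContext
    import org.apache.spark.mllib.stat.KernelDensity

    // Minimal sketch: kernel density estimate of a sample, evaluated at a few query points.
    def densitySketch(sc: SparkContext): Array[Double] = {
      val sample = sc.parallelize(Seq(1.0, 1.5, 2.0, 2.5, 8.0))   // hypothetical sample
      new KernelDensity()
        .setSample(sample)
        .setBandwidth(1.0)
        .estimate(Array(1.0, 2.0, 5.0))   // densities at the given points
    }
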
estimate(Object) - Static method in class org.apache.spark.util.SizeEstimator
Estimate the number of bytes that the given object takes up on the JVM heap.
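For example (the object being measured is arbitrary):

    import org.apache.spark.util.SizeEstimator

    // Minimal sketch: approximate on-heap footprint of an object graph, in bytes.
    val approxBytes: Long = SizeEstimator.estimate(Array.fill(1000)("spark"))
    println(approxBytes)
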
estimateCount(Object) - Method in class org.apache.spark.util.sketch.CountMinSketch
Returns the estimated frequency of item.
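A minimal sketch of Count-Min usage; the depth, width, and seed values are illustrative assumptions:

    import org.apache.spark.util.sketch.CountMinSketch

    // Minimal sketch: add items, then read an approximate (never under-estimated) count.
    val cms = CountMinSketch.create(10, 2000, 42)               // depth, width, seed
    Seq("a", "b", "a", "c", "a").foreach(item => cms.add(item))
    val approxA: Long = cms.estimateCount("a")                  // >= the true count of "a"
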
estimatedDocConcentration() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
estimatedDocConcentration() - Method in class org.apache.spark.ml.clustering.LDAModel
Value for docConcentration estimated from data.
estimatedDocConcentration() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
Estimator<M extends Model<M>> - Class in org.apache.spark.ml
:: DeveloperApi :: Abstract class for estimators that fit models to data.
Estimator() - Constructor for class org.apache.spark.ml.Estimator
 
estimator() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
estimator() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
estimator() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
estimator() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
estimatorParamMaps() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
estimatorParamMaps() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
estimatorParamMaps() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
estimatorParamMaps() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
evaluate(Dataset<?>) - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
Evaluates the model on a test dataset.
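A minimal sketch, assuming training and test DataFrames with the usual "label" and "features" columns prepared elsewhere (they are not defined here):

    import org.apache.spark.ml.classification.LogisticRegression
    import org.apache.spark.sql.DataFrame

    // Minimal sketch: fit on `training`, then summarize performance on `test`.
    def evaluateSketch(training: DataFrame, test: DataFrame): Unit = {
      val model = new LogisticRegression().setMaxIter(10).fit(training)
      val summary = model.evaluate(test)     // LogisticRegressionSummary
      summary.predictions.show(5)            // per-row predictions on the test set
    }
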
evaluate(Dataset<?>) - Method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
evaluate(Dataset<?>, ParamMap) - Method in class org.apache.spark.ml.evaluation.Evaluator
Evaluates model output and returns a scalar metric (larger is better).
evaluate(Dataset<?>) - Method in class org.apache.spark.ml.evaluation.Evaluator
Evaluates the output.
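For instance, a minimal sketch using one of the concrete evaluators (RegressionEvaluator here); the `predictions` DataFrame with "label" and "prediction" columns is assumed to come from a fitted model:

    import org.apache.spark.ml.evaluation.RegressionEvaluator
    import org.apache.spark.sql.DataFrame

    // Minimal sketch: compute a scalar metric (RMSE) from model output.
    def rmseOf(predictions: DataFrame): Double =
      new RegressionEvaluator()
        .setMetricName("rmse")
        .setLabelCol("label")
        .setPredictionCol("prediction")
        .evaluate(predictions)
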
evaluate(Dataset<?>) - Method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
evaluate(Dataset<?>) - Method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
evaluate(Dataset<?>) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
Evaluate the model on the given dataset, returning a summary of the results.
evaluate(Dataset<?>) - Method in class org.apache.spark.ml.regression.LinearRegressionModel
Evaluates the model on a test dataset.
evaluate(Row) - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
Calculates the final result of this UserDefinedAggregateFunction based on the given aggregation buffer.
evaluateEachIteration(RDD<LabeledPoint>, DecisionTreeRegressionModel[], double[], Loss, Enumeration.Value) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
Method to compute error or loss for every iteration of gradient boosting.
evaluateEachIteration(RDD<LabeledPoint>, Loss) - Method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
Method to compute error or loss for every iteration of gradient boosting.
Evaluator - Class in org.apache.spark.ml.evaluation
:: DeveloperApi :: Abstract class for evaluators that compute metrics from predictions.
Evaluator() - Constructor for class org.apache.spark.ml.evaluation.Evaluator
 
evaluator() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
evaluator() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
evaluator() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
evaluator() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
event() - Method in class org.apache.spark.streaming.flume.SparkFlumeEvent
 
EventTransformer - Class in org.apache.spark.streaming.flume
A simple object that provides the implementation of readExternal and writeExternal for both the wrapper classes for Flume-style Events.
EventTransformer() - Constructor for class org.apache.spark.streaming.flume.EventTransformer
 
except(Dataset<T>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset containing rows in this Dataset but not in another Dataset.
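A minimal sketch (the SparkSession is assumed and passed in); except behaves like SQL's EXCEPT, i.e. a set difference:

    import org.apache.spark.sql.SparkSession

    // Minimal sketch: rows in the first Dataset that are absent from the second.
    def exceptSketch(spark: SparkSession): Unit = {
      val ds1 = spark.range(0, 10)
      val ds2 = spark.range(5, 15)
      ds1.except(ds2).show()   // ids 0 through 4
    }
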
exception() - Method in class org.apache.spark.ExceptionFailure
 
exception() - Method in interface org.apache.spark.sql.ContinuousQuery
Returns the ContinuousQueryException if the query was terminated by an exception.
exception() - Method in class org.apache.spark.sql.hive.execution.ScriptTransformationWriterThread
Contains the exception thrown while writing the parent iterator to the external process.
ExceptionFailure - Class in org.apache.spark
:: DeveloperApi :: Task failed due to a runtime exception.
ExceptionFailure(String, String, StackTraceElement[], String, Option<ThrowableSerializationWrapper>, Seq<AccumulableInfo>, Seq<AccumulatorV2<?, ?>>) - Constructor for class org.apache.spark.ExceptionFailure
 
exceptionFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
exceptionString(Throwable) - Static method in class org.apache.spark.util.Utils
Return a nice string representation of the exception.
exceptionToJson(Exception) - Static method in class org.apache.spark.util.JsonProtocol
 
execId() - Method in class org.apache.spark.ExecutorLostFailure
 
execId() - Method in class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
execId() - Method in class org.apache.spark.storage.BlockManagerMessages.RemoveExecutor
 
execute() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
executeAndGetOutput(Seq<String>, File, Map<String, String>, boolean) - Static method in class org.apache.spark.util.Utils
Execute a command and get its output, throwing an exception if it yields a code other than 0.
executeBroadcast() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
executeCollect() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
executeCollectPublic() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
executeCommand(Seq<String>, File, Map<String, String>, boolean) - Static method in class org.apache.spark.util.Utils
Execute a command and return the process running the command.
executePlan(LogicalPlan) - Method in class org.apache.spark.sql.SparkSession
 
executePlan(LogicalPlan) - Method in class org.apache.spark.sql.SQLContext
 
executeQuery(Function0<T>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
executeSql(String) - Method in class org.apache.spark.sql.SparkSession
 
executeSql(String) - Method in class org.apache.spark.sql.SQLContext
 
executeTake(int) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
executeToIterator() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
ExecutionListenerManager - Class in org.apache.spark.sql.util
:: Experimental :: Manager for QueryExecutionListener instances.
executor() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillTask
 
EXECUTOR_CORES - Static variable in class org.apache.spark.launcher.SparkLauncher
Configuration key for the number of executor CPU cores.
EXECUTOR_DESERIALIZE_TIME() - Static method in class org.apache.spark.InternalAccumulator
 
EXECUTOR_EXTRA_CLASSPATH - Static variable in class org.apache.spark.launcher.SparkLauncher
Configuration key for the executor class path.
EXECUTOR_EXTRA_JAVA_OPTIONS - Static variable in class org.apache.spark.launcher.SparkLauncher
Configuration key for the executor VM options.
EXECUTOR_EXTRA_LIBRARY_PATH - Static variable in class org.apache.spark.launcher.SparkLauncher
Configuration key for the executor native library path.
EXECUTOR_MEMORY - Static variable in class org.apache.spark.launcher.SparkLauncher
Configuration key for the executor memory.
EXECUTOR_RUN_TIME() - Static method in class org.apache.spark.InternalAccumulator
 
executorAddedFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
executorAddedToJson(SparkListenerExecutorAdded) - Static method in class org.apache.spark.util.JsonProtocol
 
executorDeserializeTime() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
executorDeserializeTime() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
executorHost() - Method in class org.apache.spark.scheduler.cluster.ExecutorInfo
 
executorId() - Method in class org.apache.spark.ExecutorRegistered
 
executorId() - Method in class org.apache.spark.ExecutorRemoved
 
executorId() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.GetExecutorLossReason
 
executorId() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutor
 
executorId() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RemoveExecutor
 
executorId() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StatusUpdate
 
executorId() - Method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
executorId() - Method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
executorId() - Method in class org.apache.spark.scheduler.TaskInfo
 
executorId() - Method in class org.apache.spark.SparkEnv
 
executorId() - Method in class org.apache.spark.status.api.v1.TaskData
 
executorId() - Method in class org.apache.spark.storage.BlockManagerId
 
executorId() - Method in class org.apache.spark.storage.BlockManagerMessages.GetExecutorEndpointRef
 
executorId() - Method in class org.apache.spark.storage.BlockManagerMessages.HasCachedBlocks
 
executorId() - Method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
executorIds() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillExecutors
 
executorIdToBlockManagerId() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
executorIdToData() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
ExecutorInfo - Class in org.apache.spark.scheduler.cluster
:: DeveloperApi :: Stores information about an executor to pass from the scheduler to SparkListeners.
ExecutorInfo(String, int, Map<String, String>) - Constructor for class org.apache.spark.scheduler.cluster.ExecutorInfo
 
executorInfo() - Method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
executorInfoFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
executorInfoToJson(ExecutorInfo) - Static method in class org.apache.spark.util.JsonProtocol
 
ExecutorKilled - Class in org.apache.spark.scheduler
 
ExecutorKilled() - Constructor for class org.apache.spark.scheduler.ExecutorKilled
 
executorLogs() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
ExecutorLostFailure - Class in org.apache.spark
:: DeveloperApi :: The task failed because the executor that it was running on was lost.
ExecutorLostFailure(String, boolean, Option<String>) - Constructor for class org.apache.spark.ExecutorLostFailure
 
executorMetricsUpdateFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
executorMetricsUpdateToJson(SparkListenerExecutorMetricsUpdate) - Static method in class org.apache.spark.util.JsonProtocol
 
executorPct() - Method in class org.apache.spark.scheduler.RuntimePercentage
 
executorRef() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutor
 
ExecutorRegistered - Class in org.apache.spark
 
ExecutorRegistered(String) - Constructor for class org.apache.spark.ExecutorRegistered
 
ExecutorRemoved - Class in org.apache.spark
 
ExecutorRemoved(String) - Constructor for class org.apache.spark.ExecutorRemoved
 
executorRemovedFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
executorRemovedToJson(SparkListenerExecutorRemoved) - Static method in class org.apache.spark.util.JsonProtocol
 
executorRunTime() - Method in class org.apache.spark.status.api.v1.StageData
 
executorRunTime() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
executorRunTime() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
executorRunTime() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
executors() - Method in class org.apache.spark.status.api.v1.RDDPartitionInfo
 
ExecutorsListener - Class in org.apache.spark.ui.exec
:: DeveloperApi :: A SparkListener that prepares information to be displayed on the ExecutorsTab.
ExecutorsListener(StorageStatusListener, SparkConf) - Constructor for class org.apache.spark.ui.exec.ExecutorsListener
 
ExecutorStageSummary - Class in org.apache.spark.status.api.v1
 
ExecutorSummary - Class in org.apache.spark.status.api.v1
 
executorSummary() - Method in class org.apache.spark.status.api.v1.StageData
 
executorSummary() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
executorToDuration() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToInputBytes() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToInputRecords() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToJvmGCTime() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToLogUrls() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToOutputBytes() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToOutputRecords() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToShuffleRead() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToShuffleWrite() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToTasksActive() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToTasksComplete() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToTasksFailed() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToTasksMax() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
executorToTotalCores() - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
exists(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
exists(String) - Static method in class org.apache.spark.sql.types.UDTRegistration
Queries whether a given user class is already registered.
exists() - Method in class org.apache.spark.streaming.State
Whether the state already exists.
exitCausedByApp() - Method in class org.apache.spark.ExecutorLostFailure
 
exp(Column) - Static method in class org.apache.spark.sql.functions
Computes the exponential of the given value.
exp(String) - Static method in class org.apache.spark.sql.functions
Computes the exponential of the given column.
ExpectationSum - Class in org.apache.spark.mllib.clustering
 
ExpectationSum(double, double[], DenseVector<Object>[], DenseMatrix<Object>[]) - Constructor for class org.apache.spark.mllib.clustering.ExpectationSum
 
expectedFpp() - Method in class org.apache.spark.util.sketch.BloomFilter
Returns the probability that BloomFilter.mightContain(Object) erroneously returns true for an object that has not actually been put in the BloomFilter.
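A minimal sketch; the capacity and target false-positive rate below are illustrative:

    import org.apache.spark.util.sketch.BloomFilter

    // Minimal sketch: build a Bloom filter and inspect its expected false-positive rate.
    val bf = BloomFilter.create(10000L, 0.03)            // expected items, target fpp
    Seq("u1", "u2", "u3").foreach(item => bf.put(item))
    println(bf.expectedFpp())                            // roughly the requested 0.03
    println(bf.mightContain("u1"))                       // true; "u1" was inserted
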
experimental() - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: A collection of methods that are considered experimental, but can be used to hook into the query planner for advanced functionality.
experimental() - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: A collection of methods that are considered experimental, but can be used to hook into the query planner for advanced functionality.
ExperimentalMethods - Class in org.apache.spark.sql
:: Experimental :: Holder for experimental methods for the bravest.
ExpireDeadHosts - Class in org.apache.spark
 
ExpireDeadHosts() - Constructor for class org.apache.spark.ExpireDeadHosts
 
explain(boolean) - Method in class org.apache.spark.sql.Column
Prints the expression to the console for debugging purposes.
explain(boolean) - Method in class org.apache.spark.sql.Dataset
Prints the plans (logical and physical) to the console for debugging purposes.
explain() - Method in class org.apache.spark.sql.Dataset
Prints the physical plan to the console for debugging purposes.
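For example (the SparkSession is assumed and passed in; the query itself is arbitrary):

    import org.apache.spark.sql.SparkSession

    // Minimal sketch: physical-only plan vs. logical plus physical plans.
    def explainSketch(spark: SparkSession): Unit = {
      val df = spark.range(100).filter("id % 2 = 0")
      df.explain()        // physical plan only
      df.explain(true)    // logical and physical plans
    }
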
explainedVariance() - Method in class org.apache.spark.ml.feature.PCAModel
 
explainedVariance() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Returns the explained variance regression score.
explainedVariance() - Method in class org.apache.spark.mllib.evaluation.RegressionMetrics
Returns the variance explained by regression.
explainedVariance() - Method in class org.apache.spark.mllib.feature.PCAModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.clustering.LDA
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.DCT
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.IDF
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.Interaction
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.NGram
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.PCA
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.RFormula
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
explainParam(Param<?>) - Method in interface org.apache.spark.ml.param.Params
Explains a param.
explainParam(Param<?>) - Static method in class org.apache.spark.ml.Pipeline
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.PipelineModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
explainParam(Param<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
explainParams() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
explainParams() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
explainParams() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
explainParams() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
explainParams() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
explainParams() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
explainParams() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
explainParams() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
explainParams() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
explainParams() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
explainParams() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
explainParams() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
explainParams() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
explainParams() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
explainParams() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
explainParams() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
explainParams() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
explainParams() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
explainParams() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
explainParams() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
explainParams() - Static method in class org.apache.spark.ml.clustering.KMeans
 
explainParams() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
explainParams() - Static method in class org.apache.spark.ml.clustering.LDA
 
explainParams() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
explainParams() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
explainParams() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
explainParams() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
explainParams() - Static method in class org.apache.spark.ml.feature.Binarizer
 
explainParams() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
explainParams() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
explainParams() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
explainParams() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
explainParams() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.DCT
 
explainParams() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
explainParams() - Static method in class org.apache.spark.ml.feature.HashingTF
 
explainParams() - Static method in class org.apache.spark.ml.feature.IDF
 
explainParams() - Static method in class org.apache.spark.ml.feature.IDFModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.IndexToString
 
explainParams() - Static method in class org.apache.spark.ml.feature.Interaction
 
explainParams() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
explainParams() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
explainParams() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.NGram
 
explainParams() - Static method in class org.apache.spark.ml.feature.Normalizer
 
explainParams() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
explainParams() - Static method in class org.apache.spark.ml.feature.PCA
 
explainParams() - Static method in class org.apache.spark.ml.feature.PCAModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
explainParams() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
explainParams() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
explainParams() - Static method in class org.apache.spark.ml.feature.RFormula
 
explainParams() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
explainParams() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
explainParams() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
explainParams() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
explainParams() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
explainParams() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
explainParams() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
explainParams() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
explainParams() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
explainParams() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
explainParams() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
explainParams() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
explainParams() - Method in interface org.apache.spark.ml.param.Params
Explains all params of this instance.
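As an illustration on one concrete Params instance (LogisticRegression is just a convenient example):

    import org.apache.spark.ml.classification.LogisticRegression

    // Minimal sketch: print the doc for one param, then for all params.
    val lr = new LogisticRegression()
    println(lr.explainParam(lr.regParam))   // name, doc, and default/current value
    println(lr.explainParams())             // the same, for every param of lr
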
explainParams() - Static method in class org.apache.spark.ml.Pipeline
 
explainParams() - Static method in class org.apache.spark.ml.PipelineModel
 
explainParams() - Static method in class org.apache.spark.ml.recommendation.ALS
 
explainParams() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
explainParams() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
explainParams() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
explainParams() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
explainParams() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
explainParams() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
explainParams() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
explainParams() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
explainParams() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
explainParams() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
explainParams() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
explainParams() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
explainParams() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
explainParams() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
explainParams() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
explainParams() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
explainParams() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
explainParams() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
explainParams() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
explode(Seq<Column>, Function1<Row, TraversableOnce<A>>, TypeTags.TypeTag<A>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Scala-specific) Returns a new Dataset where each row has been expanded to zero or more rows by the provided function.
explode(String, String, Function1<A, TraversableOnce<B>>, TypeTags.TypeTag<B>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Scala-specific) Returns a new Dataset where a single column has been expanded to zero or more rows by the provided function.
explode(Column) - Static method in class org.apache.spark.sql.functions
Creates a new row for each element in the given array or map column.
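A minimal sketch (the data is made up; the SparkSession is assumed and passed in):

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.explode

    // Minimal sketch: one output row per element of the "letters" array column.
    def explodeSketch(spark: SparkSession): Unit = {
      import spark.implicits._
      val df = Seq((1, Seq("a", "b")), (2, Seq("c"))).toDF("id", "letters")
      df.select($"id", explode($"letters").as("letter")).show()   // (1,a), (1,b), (2,c)
    }
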
expm1(Column) - Static method in class org.apache.spark.sql.functions
Computes the exponential of the given value minus one.
expm1(String) - Static method in class org.apache.spark.sql.functions
Computes the exponential of the given column minus one.
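For small x, expm1 is the numerically safer way to compute e^x - 1. A minimal sketch (the SparkSession is assumed and passed in; the data is made up):

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.{exp, expm1}

    // Minimal sketch: compare exp(x) and expm1(x) column-wise.
    def expSketch(spark: SparkSession): Unit = {
      import spark.implicits._
      val df = Seq(0.0, 1.0e-10, 1.0).toDF("x")
      df.select(exp($"x"), expm1($"x")).show(truncate = false)
    }
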
ExponentialGenerator - Class in org.apache.spark.mllib.random
:: DeveloperApi :: Generates i.i.d. samples from the exponential distribution with the given mean.
ExponentialGenerator(double) - Constructor for class org.apache.spark.mllib.random.ExponentialGenerator
 
exponentialJavaRDD(JavaSparkContext, double, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
exponentialJavaRDD(JavaSparkContext, double, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
exponentialJavaRDD(JavaSparkContext, double, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
exponentialJavaVectorRDD(JavaSparkContext, double, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
exponentialJavaVectorRDD(JavaSparkContext, double, long, int, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
exponentialJavaVectorRDD(JavaSparkContext, double, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
exponentialRDD(SparkContext, double, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD comprised of i.i.d. samples from the exponential distribution with the input mean.
exponentialVectorRDD(SparkContext, double, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD[Vector] with vectors containing i.i.d. samples drawn from the exponential distribution with the input mean.
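A minimal sketch of the scalar RDD variant, exponentialRDD; the mean, size, partition count, and seed values are illustrative:

    import org.apache.spark.SparkContext
    import org.apache.spark.mllib.random.RandomRDDs
    import org.apache.spark.rdd.RDD

    // Minimal sketch: 1000 i.i.d. exponential(mean = 2.0) samples over 4 partitions.
    def exponentialSketch(sc: SparkContext): RDD[Double] =
      RandomRDDs.exponentialRDD(sc, mean = 2.0, size = 1000L, numPartitions = 4, seed = 11L)
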
expr() - Method in class org.apache.spark.sql.Column
 
expr(String) - Static method in class org.apache.spark.sql.functions
Parses the expression string into the column that it represents, similar to DataFrame.selectExpr.
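A minimal sketch (the SparkSession is assumed and passed in; the data is made up); expr turns a SQL expression string into a Column:

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.expr

    // Minimal sketch: build columns from SQL expression strings.
    def exprSketch(spark: SparkSession): Unit = {
      import spark.implicits._
      val df = Seq(("alice", 30), ("bob", 25)).toDF("name", "age")
      df.select(expr("name"), expr("age + 1").as("age_next_year")).show()
    }
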
expressions() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
extensionsForCompressionCodecNames() - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
externalBlockStoreSize() - Method in class org.apache.spark.storage.RDDInfo
 
externalCatalog() - Method in class org.apache.spark.sql.SparkSession
 
externalCatalog() - Method in class org.apache.spark.sql.SQLContext
 
extractAFTPoints(Dataset<?>) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
Extract featuresCol, labelCol and censorCol from the input dataset, and put them in an RDD with strong types.
extractDistribution(Function1<BatchInfo, Option<Object>>) - Method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
extractDoubleDistribution(Seq<Tuple2<TaskInfo, TaskMetrics>>, Function2<TaskInfo, TaskMetrics, Object>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
extractFn() - Method in class org.apache.spark.ui.JettyUtils.ServletParams
 
extractHostPortFromSparkUrl(String) - Static method in class org.apache.spark.util.Utils
Return a pair of host and port extracted from the sparkUrl.
extractLabeledPoints(Dataset<?>, int) - Method in class org.apache.spark.ml.classification.Classifier
Extract labelCol and featuresCol from the given dataset, and put them in an RDD with strong types.
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
extractLabeledPoints(Dataset<?>, int) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
extractLabeledPoints(Dataset<?>, int) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
extractLabeledPoints(Dataset<?>, int) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
extractLabeledPoints(Dataset<?>, int) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
extractLabeledPoints(Dataset<?>) - Method in class org.apache.spark.ml.Predictor
Extract labelCol and featuresCol from the given dataset, and put them in an RDD with strong types.
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
extractLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
extractLongDistribution(Seq<Tuple2<TaskInfo, TaskMetrics>>, Function2<TaskInfo, TaskMetrics, Object>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
extractParamMap() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
extractParamMap() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
extractParamMap() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
extractParamMap() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
extractParamMap() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
extractParamMap() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.clustering.KMeans
 
extractParamMap() - Static method in class org.apache.spark.ml.clustering.KMeans
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
extractParamMap() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.clustering.LDA
 
extractParamMap() - Static method in class org.apache.spark.ml.clustering.LDA
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
extractParamMap() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
extractParamMap() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
extractParamMap() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
extractParamMap() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.Binarizer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.Binarizer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.DCT
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.DCT
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.HashingTF
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.HashingTF
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.IDF
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.IDF
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.IDFModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.IDFModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.IndexToString
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.IndexToString
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.Interaction
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.Interaction
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.NGram
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.NGram
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.Normalizer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.Normalizer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.PCA
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.PCA
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.PCAModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.PCAModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.RFormula
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.RFormula
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
extractParamMap() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
extractParamMap(ParamMap) - Method in interface org.apache.spark.ml.param.Params
Extracts the embedded default param values and user-supplied values, then merges them with the extra values from the input into a flat param map, where the latter value wins if there are conflicts, i.e., with ordering: default param values < user-supplied values < extra.
extractParamMap() - Method in interface org.apache.spark.ml.param.Params
extractParamMap with no extra values.
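For illustration, a minimal, hypothetical sketch (Java, Spark 2.x) of the merge ordering described above; LogisticRegression is used only as a convenient Params implementation, and the parameter values are arbitrary:
```java
import org.apache.spark.ml.classification.LogisticRegression;
import org.apache.spark.ml.param.ParamMap;

public class ExtractParamMapSketch {
  public static void main(String[] args) {
    // user-supplied value (overrides the estimator's default)
    LogisticRegression lr = new LogisticRegression().setMaxIter(5);
    // extra value, which wins over the user-supplied one on conflict
    ParamMap extra = new ParamMap().put(lr.maxIter().w(10));
    ParamMap merged = lr.extractParamMap(extra);
    System.out.println(merged.apply(lr.maxIter())); // prints 10
  }
}
```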
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.Pipeline
 
extractParamMap() - Static method in class org.apache.spark.ml.Pipeline
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.PipelineModel
 
extractParamMap() - Static method in class org.apache.spark.ml.PipelineModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.recommendation.ALS
 
extractParamMap() - Static method in class org.apache.spark.ml.recommendation.ALS
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
extractParamMap() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
extractParamMap() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
extractParamMap() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
extractParamMap() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
extractParamMap() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
extractParamMap(ParamMap) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
extractParamMap() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
extractWeightedLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
extractWeightedLabeledPoints(Dataset<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
extraOptimizations() - Method in class org.apache.spark.sql.ExperimentalMethods
 
extraStrategies() - Method in class org.apache.spark.sql.ExperimentalMethods
Allows extra strategies to be injected into the query planner at runtime.
eye(int) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
Generate an Identity Matrix in DenseMatrix format.
eye(int) - Static method in class org.apache.spark.ml.linalg.Matrices
Generate a dense Identity Matrix in Matrix format.
eye(int) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
Generate an Identity Matrix in DenseMatrix format.
eye(int) - Static method in class org.apache.spark.mllib.linalg.Matrices
Generate a dense Identity Matrix in Matrix format.
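A small sketch of the eye(int) factories listed above, assuming the spark-mllib artifact is on the classpath; both calls produce a 3x3 identity matrix:
```java
import org.apache.spark.ml.linalg.DenseMatrix;
import org.apache.spark.ml.linalg.Matrices;
import org.apache.spark.ml.linalg.Matrix;

public class EyeSketch {
  public static void main(String[] args) {
    DenseMatrix dense = DenseMatrix.eye(3); // 3x3 identity as a DenseMatrix
    Matrix generic = Matrices.eye(3);       // same matrix through the Matrices factory
    System.out.println(dense);
    System.out.println(generic.numRows() + " x " + generic.numCols());
  }
}
```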

F

f() - Method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
f1Measure() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns document-based f1-measure averaged by the number of documents
f1Measure(double) - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns f1-measure for a given label (category)
factorial(Column) - Static method in class org.apache.spark.sql.functions
Computes the factorial of the given value.
failed() - Method in class org.apache.spark.scheduler.TaskInfo
 
FAILED() - Static method in class org.apache.spark.TaskState
 
failedJobs() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
failedStages() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
failedTasks() - Method in class org.apache.spark.status.api.v1.ExecutorStageSummary
 
failedTasks() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
failedTasks() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
failure(String) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
failureReason() - Method in class org.apache.spark.scheduler.StageInfo
If the stage failed, the reason why.
failureReason() - Method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
failureReasonCell(String, int, boolean) - Static method in class org.apache.spark.streaming.ui.UIUtils
 
FAIR() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
FallbackConfigEntry<T> - Class in org.apache.spark.internal.config
A config entry whose default value is defined by another config entry.
FallbackConfigEntry(String, String, boolean, ConfigEntry<T>) - Constructor for class org.apache.spark.internal.config.FallbackConfigEntry
 
FalsePositiveRate - Class in org.apache.spark.mllib.evaluation.binary
False positive rate.
FalsePositiveRate() - Constructor for class org.apache.spark.mllib.evaluation.binary.FalsePositiveRate
 
falsePositiveRate(double) - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns false positive rate for a given label (category)
family() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
family() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
familyAndLink() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
familyObj() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
fastEquals(TreeNode<?>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
feature() - Method in class org.apache.spark.mllib.feature.ChiSqSelectorModel.SaveLoadV1_0$.Data
 
feature() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.SplitData
 
feature() - Method in class org.apache.spark.mllib.tree.model.Split
 
featureImportances() - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
Estimate of the importance of each feature.
featureImportances() - Method in class org.apache.spark.ml.classification.GBTClassificationModel
Estimate of the importance of each feature.
featureImportances() - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
Estimate of the importance of each feature.
featureImportances() - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
Estimate of the importance of each feature.
featureImportances() - Method in class org.apache.spark.ml.regression.GBTRegressionModel
Estimate of the importance of each feature.
featureImportances() - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
Estimate of the importance of each feature.
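A minimal sketch of reading featureImportances from a fitted tree-ensemble model; rfModel is an assumed, already-trained RandomForestClassificationModel:
```java
// assumes: import org.apache.spark.ml.classification.RandomForestClassificationModel;
//          import org.apache.spark.ml.linalg.Vector;
Vector importances = rfModel.featureImportances(); // one importance weight per input feature
System.out.println(importances);
```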
featureIndex() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
featureIndex() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
featureIndex() - Method in class org.apache.spark.ml.tree.CategoricalSplit
 
featureIndex() - Method in class org.apache.spark.ml.tree.ContinuousSplit
 
featureIndex() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.SplitData
 
featureIndex() - Method in interface org.apache.spark.ml.tree.Split
Index of feature which this split tests
features() - Method in class org.apache.spark.ml.feature.LabeledPoint
 
features() - Method in class org.apache.spark.mllib.regression.LabeledPoint
 
featuresCol() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
 
featuresCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
featuresCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
featuresCol() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
featuresCol() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
featuresCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
featuresCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
featuresCol() - Method in interface org.apache.spark.ml.classification.LogisticRegressionSummary
Field in "predictions" which gives the features of each instance as a vector.
featuresCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
featuresCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
featuresCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
featuresCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
featuresCol() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
featuresCol() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
featuresCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
featuresCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
featuresCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
featuresCol() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
featuresCol() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
featuresCol() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
featuresCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
featuresCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
featuresCol() - Method in class org.apache.spark.ml.clustering.GaussianMixtureSummary
 
featuresCol() - Static method in class org.apache.spark.ml.clustering.KMeans
 
featuresCol() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
featuresCol() - Method in class org.apache.spark.ml.clustering.KMeansSummary
 
featuresCol() - Static method in class org.apache.spark.ml.clustering.LDA
 
featuresCol() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
featuresCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
featuresCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
featuresCol() - Static method in class org.apache.spark.ml.feature.RFormula
 
featuresCol() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
featuresCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
featuresCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
featuresCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
featuresCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
featuresCol() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
featuresCol() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
featuresCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
featuresCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
featuresCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
featuresCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
featuresCol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
featuresCol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
featuresCol() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
 
featuresCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
featuresCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
featuresDataType() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
featuresDataType() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
featuresDataType() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
featuresDataType() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
featuresDataType() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
featuresDataType() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
featuresDataType() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
featuresDataType() - Method in class org.apache.spark.ml.PredictionModel
Returns the SQL DataType corresponding to the FeaturesType type parameter.
featuresDataType() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
featuresDataType() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
featuresDataType() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
featuresDataType() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
featuresDataType() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
featureSubsetStrategy() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
featureSubsetStrategy() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
featureSubsetStrategy() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
featureSubsetStrategy() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
FeatureType - Class in org.apache.spark.mllib.tree.configuration
Enum to describe whether a feature is "continuous" or "categorical"
FeatureType() - Constructor for class org.apache.spark.mllib.tree.configuration.FeatureType
 
featureType() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.SplitData
 
featureType() - Method in class org.apache.spark.mllib.tree.model.Split
 
FETCH_WAIT_TIME() - Method in class org.apache.spark.InternalAccumulator.shuffleRead$
 
FetchFailed - Class in org.apache.spark
:: DeveloperApi :: Task failed to fetch shuffle data from a remote node.
FetchFailed(BlockManagerId, int, int, int, String) - Constructor for class org.apache.spark.FetchFailed
 
fetchFile(String, File, SparkConf, org.apache.spark.SecurityManager, Configuration, long, boolean) - Static method in class org.apache.spark.util.Utils
Download a file or directory to target directory.
fetchPct() - Method in class org.apache.spark.scheduler.RuntimePercentage
 
fetchWaitTime() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetricDistributions
 
fetchWaitTime() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetrics
 
field() - Method in class org.apache.spark.storage.BroadcastBlockId
 
fieldIndex(String) - Method in interface org.apache.spark.sql.Row
Returns the index of a given field name.
fieldIndex(String) - Method in class org.apache.spark.sql.types.StructType
Returns the index of a given field.
fieldNames() - Method in class org.apache.spark.sql.types.StructType
Returns all field names in an array.
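A short sketch of the field-lookup helpers above, assuming an existing Dataset<Row> named df with an integer column "age":
```java
// assumes: import org.apache.spark.sql.Dataset;  import org.apache.spark.sql.Row;
//          import org.apache.spark.sql.types.StructType;
StructType schema = df.schema();
String[] names = schema.fieldNames();       // all field names, in order
int ageInSchema = schema.fieldIndex("age"); // index of "age" in the schema
Row first = df.first();
int ageInRow = first.fieldIndex("age");     // same lookup on an individual Row
```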
fields() - Method in class org.apache.spark.sql.types.StructType
 
FIFO() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
files() - Method in class org.apache.spark.SparkContext
 
fileStream(String, Class<K>, Class<V>, Class<F>) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them using the given key-value types and input format.
fileStream(String, Class<K>, Class<V>, Class<F>, Function<Path, Boolean>, boolean) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them using the given key-value types and input format.
fileStream(String, Class<K>, Class<V>, Class<F>, Function<Path, Boolean>, boolean, Configuration) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them using the given key-value types and input format.
fileStream(String, ClassTag<K>, ClassTag<V>, ClassTag<F>) - Method in class org.apache.spark.streaming.StreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them using the given key-value types and input format.
fileStream(String, Function1<Path, Object>, boolean, ClassTag<K>, ClassTag<V>, ClassTag<F>) - Method in class org.apache.spark.streaming.StreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them using the given key-value types and input format.
fileStream(String, Function1<Path, Object>, boolean, Configuration, ClassTag<K>, ClassTag<V>, ClassTag<F>) - Method in class org.apache.spark.streaming.StreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them using the given key-value types and input format.
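A minimal sketch of the simplest Java fileStream variant above; the directory path is a placeholder and jssc is an assumed, already-created JavaStreamingContext:
```java
// assumes: import org.apache.hadoop.io.LongWritable;
//          import org.apache.hadoop.io.Text;
//          import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
//          import org.apache.spark.streaming.api.java.JavaPairInputDStream;
JavaPairInputDStream<LongWritable, Text> files =
    jssc.fileStream("hdfs:///data/incoming", LongWritable.class, Text.class, TextInputFormat.class);
files.print(); // print a few records of each new batch
```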
fill(double) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that replaces null or NaN values in numeric columns with value.
fill(String) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that replaces null values in string columns with value.
fill(double, String[]) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that replaces null or NaN values in specified numeric columns.
fill(double, Seq<String>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
(Scala-specific) Returns a new DataFrame that replaces null or NaN values in specified numeric columns.
fill(String, String[]) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that replaces null values in specified string columns.
fill(String, Seq<String>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
(Scala-specific) Returns a new DataFrame that replaces null values in specified string columns.
fill(Map<String, Object>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Returns a new DataFrame that replaces null values.
fill(Map<String, Object>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
(Scala-specific) Returns a new DataFrame that replaces null values.
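A brief sketch of the fill overloads above; it assumes an existing Dataset<Row> df with a (hypothetical) numeric column "age" and string column "name":
```java
// assumes: import java.util.HashMap;  import java.util.Map;
//          import org.apache.spark.sql.Dataset;  import org.apache.spark.sql.Row;
Dataset<Row> agesFilled  = df.na().fill(0.0, new String[] {"age"});         // numeric columns
Dataset<Row> namesFilled = df.na().fill("unknown", new String[] {"name"});  // string columns

Map<String, Object> perColumn = new HashMap<>();
perColumn.put("age", 0.0);
perColumn.put("name", "unknown");
Dataset<Row> allFilled = df.na().fill(perColumn);                           // per-column values
```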
fillInStackTrace() - Static method in exception org.apache.spark.sql.AnalysisException
 
fillInStackTrace() - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
filter(Function<Double, Boolean>) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return a new RDD containing only the elements that satisfy a predicate.
filter(Function<Tuple2<K, V>, Boolean>) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a new RDD containing only the elements that satisfy a predicate.
filter(Function<T, Boolean>) - Method in class org.apache.spark.api.java.JavaRDD
Return a new RDD containing only the elements that satisfy a predicate.
filter(Function1<T, Object>) - Static method in class org.apache.spark.api.r.RRDD
 
filter(Function1<T, Object>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
filter(Function1<Graph<VD, ED>, Graph<VD2, ED2>>, Function1<EdgeTriplet<VD2, ED2>, Object>, Function2<Object, VD2, Object>, ClassTag<VD2>, ClassTag<ED2>) - Method in class org.apache.spark.graphx.GraphOps
Filter the graph by computing some values to filter on, and applying the predicates.
filter(Function1<EdgeTriplet<VD, ED>, Object>, Function2<Object, VD, Object>) - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
filter(Function1<Tuple2<Object, VD>, Object>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
filter(Function1<Tuple2<Object, VD>, Object>) - Method in class org.apache.spark.graphx.VertexRDD
Restricts the vertex set to the set of vertices satisfying the given predicate.
filter(Params) - Method in class org.apache.spark.ml.param.ParamMap
Filters this param map for the given parent.
filter(Function1<T, Object>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
filter(Function1<T, Object>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
filter(Function1<T, Object>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
filter(Function1<T, Object>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
filter(Function1<T, Object>) - Method in class org.apache.spark.rdd.RDD
Return a new RDD containing only the elements that satisfy a predicate.
filter(Column) - Method in class org.apache.spark.sql.Dataset
Filters rows using the given condition.
filter(String) - Method in class org.apache.spark.sql.Dataset
Filters rows using the given SQL expression.
filter(Function1<T, Object>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Scala-specific) Returns a new Dataset that only contains elements where func returns true.
filter(FilterFunction<T>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Java-specific) Returns a new Dataset that only contains elements where func returns true.
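A short sketch of the three Dataset.filter flavors above, assuming a Dataset<Row> df with an integer column "age":
```java
// assumes: import org.apache.spark.api.java.function.FilterFunction;
//          import static org.apache.spark.sql.functions.col;
Dataset<Row> byColumn = df.filter(col("age").geq(18));                       // filter(Column)
Dataset<Row> bySql    = df.filter("age >= 18");                              // filter(String)
Dataset<Row> byFunc   = df.filter((FilterFunction<Row>) r -> r.<Integer>getAs("age") >= 18);
```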
Filter - Class in org.apache.spark.sql.sources
A filter predicate for data sources.
Filter() - Constructor for class org.apache.spark.sql.sources.Filter
 
filter(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
filter() - Method in class org.apache.spark.storage.BlockManagerMessages.GetMatchingBlockIds
 
filter(Function<T, Boolean>) - Method in class org.apache.spark.streaming.api.java.JavaDStream
Return a new DStream containing only the elements that satisfy a predicate.
filter(Function<T, Boolean>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
filter(Function<Tuple2<K, V>, Boolean>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream containing only the elements that satisfy a predicate.
filter(Function<Tuple2<K, V>, Boolean>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
filter(Function<Tuple2<K, V>, Boolean>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
filter(Function<T, Boolean>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
filter(Function1<T, Object>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream containing only the elements that satisfy a predicate.
filterByRange(K, K) - Method in class org.apache.spark.rdd.OrderedRDDFunctions
Returns an RDD containing only the elements in the inclusive range lower to upper.
FilterFunction<T> - Interface in org.apache.spark.api.java.function
Base interface for a function used in Dataset's filter function.
filterName() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.AddWebUIFilter
 
filterNot(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
filterParams() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.AddWebUIFilter
 
finalStorageLevel() - Static method in class org.apache.spark.ml.recommendation.ALS
 
find(Function1<BaseType, Object>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
find(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
findLeader(String, int) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
findLeaders(Set<TopicAndPartition>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
findSplits(RDD<LabeledPoint>, DecisionTreeMetadata, long) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
Returns splits for decision tree calculation.
findSynonyms(String, int) - Method in class org.apache.spark.ml.feature.Word2VecModel
Find the "num" words closest in similarity to the given word.
findSynonyms(Vector, int) - Method in class org.apache.spark.ml.feature.Word2VecModel
Find the "num" words closest in similarity to the given vector representation of a word.
findSynonyms(String, int) - Method in class org.apache.spark.mllib.feature.Word2VecModel
Find synonyms of a word
findSynonyms(Vector, int) - Method in class org.apache.spark.mllib.feature.Word2VecModel
Find synonyms of the vector representation of a word
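A minimal sketch of findSynonyms on a fitted org.apache.spark.ml.feature.Word2VecModel; model is an assumed, already-fitted instance:
```java
// Returns a DataFrame with "word" and "similarity" columns for the 5 nearest words.
Dataset<Row> synonyms = model.findSynonyms("spark", 5);
synonyms.show();
```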
finish(BUF) - Method in class org.apache.spark.sql.expressions.Aggregator
Transform the output of the reduction.
finished() - Method in class org.apache.spark.scheduler.TaskInfo
 
FINISHED() - Static method in class org.apache.spark.TaskState
 
FINISHED_STATES() - Static method in class org.apache.spark.TaskState
 
finishReason() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorUIData
 
finishTime() - Method in class org.apache.spark.scheduler.TaskInfo
The time when the task has completed successfully (including the time to remotely fetch results, if necessary).
finishTime() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorUIData
 
first() - Method in class org.apache.spark.api.java.JavaDoubleRDD
 
first() - Method in class org.apache.spark.api.java.JavaPairRDD
 
first() - Static method in class org.apache.spark.api.java.JavaRDD
 
first() - Method in interface org.apache.spark.api.java.JavaRDDLike
Return the first element in this RDD.
first() - Static method in class org.apache.spark.api.r.RRDD
 
first() - Static method in class org.apache.spark.graphx.EdgeRDD
 
first() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
first() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
first() - Static method in class org.apache.spark.graphx.VertexRDD
 
first() - Static method in class org.apache.spark.rdd.HadoopRDD
 
first() - Static method in class org.apache.spark.rdd.JdbcRDD
 
first() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
first() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
first() - Method in class org.apache.spark.rdd.RDD
Return the first element in this RDD.
first() - Method in class org.apache.spark.sql.Dataset
Returns the first row.
first(Column, boolean) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the first value in a group.
first(String, boolean) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the first value of a column in a group.
first(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the first value in a group.
first(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the first value of a column in a group.
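A small sketch of the first aggregate above, assuming a Dataset<Row> df with columns "dept" and "name":
```java
// assumes: import static org.apache.spark.sql.functions.col;
//          import static org.apache.spark.sql.functions.first;
Dataset<Row> firstPerDept = df.groupBy("dept").agg(first("name"));            // may pick a null
Dataset<Row> firstNonNull = df.groupBy("dept").agg(first(col("name"), true)); // skip nulls
```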
firstParent(ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
firstParent(ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
firstParent(ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
firstParent(ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
firstParent(ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
firstParent(ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
firstParent(ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
firstParent(ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
firstParent(ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
firstParent(ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Returns the first parent RDD
firstTaskLaunchedTime() - Method in class org.apache.spark.status.api.v1.StageData
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.classification.OneVsRest
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.clustering.KMeans
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.clustering.LDA
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Method in class org.apache.spark.ml.Estimator
Fits a single model to the input data with optional parameters.
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Method in class org.apache.spark.ml.Estimator
Fits a single model to the input data with optional parameters.
fit(Dataset<?>, ParamMap) - Method in class org.apache.spark.ml.Estimator
Fits a single model to the input data with provided parameter map.
fit(Dataset<?>) - Method in class org.apache.spark.ml.Estimator
Fits a model to the input data.
fit(Dataset<?>, ParamMap[]) - Method in class org.apache.spark.ml.Estimator
Fits multiple models to the input data with multiple sets of parameters.
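A minimal sketch of the Estimator.fit variants, using LogisticRegression as the estimator; training is an assumed Dataset<Row> with "label" and "features" columns:
```java
// assumes: import org.apache.spark.ml.classification.LogisticRegression;
//          import org.apache.spark.ml.classification.LogisticRegressionModel;
//          import org.apache.spark.ml.param.ParamMap;
LogisticRegression lr = new LogisticRegression();
LogisticRegressionModel byDefaults = lr.fit(training);                // fit(Dataset<?>)
ParamMap overrides = new ParamMap().put(lr.maxIter().w(20)).put(lr.regParam().w(0.01));
LogisticRegressionModel tuned = lr.fit(training, overrides);          // fit(Dataset<?>, ParamMap)
```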
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.ChiSqSelector
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.CountVectorizer
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.IDF
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.MaxAbsScaler
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.MinMaxScaler
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.PCA
Computes a PCAModel that contains the principal components of the input vectors.
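A brief sketch of fitting the ml PCA estimator above; df is an assumed Dataset<Row> with a Vector column "features":
```java
// assumes: import org.apache.spark.ml.feature.PCA;
//          import org.apache.spark.ml.feature.PCAModel;
PCAModel pcaModel = new PCA()
    .setInputCol("features")
    .setOutputCol("pcaFeatures")
    .setK(3)                  // keep the top 3 principal components
    .fit(df);
Dataset<Row> projected = pcaModel.transform(df);
```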
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.RFormula
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.StandardScaler
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.StringIndexer
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.VectorIndexer
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.feature.Word2Vec
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.Pipeline
Fits the pipeline to the input dataset with additional parameters.
fit(Dataset<?>) - Method in class org.apache.spark.ml.Predictor
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.recommendation.ALS
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
fit(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
fit(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
fit(Dataset<?>, ParamMap[]) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
fit(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
fit(Dataset<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.tuning.CrossValidator
 
fit(Dataset<?>) - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
fit(RDD<LabeledPoint>) - Method in class org.apache.spark.mllib.feature.ChiSqSelector
Returns a ChiSquared feature selector.
fit(RDD<Vector>) - Method in class org.apache.spark.mllib.feature.IDF
Computes the inverse document frequency.
fit(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.feature.IDF
Computes the inverse document frequency.
fit(RDD<Vector>) - Method in class org.apache.spark.mllib.feature.PCA
Computes a PCAModel that contains the principal components of the input vectors.
fit(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.feature.PCA
Java-friendly version of fit()
fit(RDD<Vector>) - Method in class org.apache.spark.mllib.feature.StandardScaler
Computes the mean and variance and stores them as a model to be used for later scaling.
fit(RDD<S>) - Method in class org.apache.spark.mllib.feature.Word2Vec
Computes the vector representation of each word in vocabulary.
fit(JavaRDD<S>) - Method in class org.apache.spark.mllib.feature.Word2Vec
Computes the vector representation of each word in vocabulary (Java version).
fitIntercept() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
fitIntercept() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
fitIntercept() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
fitIntercept() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
fitIntercept() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
fitIntercept() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
fitIntercept() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
fitIntercept() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
flatMap(FlatMapFunction<T, U>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
flatMap(FlatMapFunction<T, U>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
flatMap(FlatMapFunction<T, U>) - Static method in class org.apache.spark.api.java.JavaRDD
 
flatMap(FlatMapFunction<T, U>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by first applying a function to all elements of this RDD, and then flattening the results.
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Return a new RDD by first applying a function to all elements of this RDD, and then flattening the results.
flatMap(Function1<T, TraversableOnce<U>>, Encoder<U>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Scala-specific) Returns a new Dataset by first applying a function to all elements of this Dataset, and then flattening the results.
flatMap(FlatMapFunction<T, U>, Encoder<U>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Java-specific) Returns a new Dataset by first applying a function to all elements of this Dataset, and then flattening the results.
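A small sketch of the Java-specific Dataset.flatMap above, assuming a Dataset<String> named lines:
```java
// assumes: import java.util.Arrays;
//          import org.apache.spark.api.java.function.FlatMapFunction;
//          import org.apache.spark.sql.Encoders;
Dataset<String> words = lines.flatMap(
    (FlatMapFunction<String, String>) line -> Arrays.asList(line.split(" ")).iterator(),
    Encoders.STRING());
```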
flatMap(Function1<BaseType, TraversableOnce<A>>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
flatMap(Function1<A, GenTraversableOnce<B>>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
flatMap(FlatMapFunction<T, U>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
flatMap(FlatMapFunction<T, U>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream by applying a function to all elements of this DStream, and then flattening the results
flatMap(FlatMapFunction<T, U>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
flatMap(FlatMapFunction<T, U>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
flatMap(FlatMapFunction<T, U>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
flatMap(FlatMapFunction<T, U>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
flatMap(FlatMapFunction<T, U>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
flatMap(Function1<T, TraversableOnce<U>>, ClassTag<U>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream by applying a function to all elements of this DStream, and then flattening the results
FlatMapFunction<T,R> - Interface in org.apache.spark.api.java.function
A function that returns zero or more output records from each input record.
FlatMapFunction2<T1,T2,R> - Interface in org.apache.spark.api.java.function
A function that takes two inputs and returns zero or more output records.
flatMapGroups(Function2<K, Iterator<V>, TraversableOnce<U>>, Encoder<U>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Applies the given function to each group of data.
flatMapGroups(FlatMapGroupsFunction<K, V, U>, Encoder<U>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Applies the given function to each group of data.
FlatMapGroupsFunction<K,V,R> - Interface in org.apache.spark.api.java.function
A function that returns zero or more output records from each grouping key and its values.
flatMapToDouble(DoubleFlatMapFunction<T>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
flatMapToDouble(DoubleFlatMapFunction<T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
flatMapToDouble(DoubleFlatMapFunction<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
flatMapToDouble(DoubleFlatMapFunction<T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by first applying a function to all elements of this RDD, and then flattening the results.
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Static method in class org.apache.spark.api.java.JavaRDD
 
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by first applying a function to all elements of this RDD, and then flattening the results.
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream by applying a function to all elements of this DStream, and then flattening the results
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
flatMapToPair(PairFlatMapFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
flatMapValues(Function<V, Iterable<U>>) - Method in class org.apache.spark.api.java.JavaPairRDD
Pass each value in the key-value pair RDD through a flatMap function without changing the keys; this also retains the original RDD's partitioning.
flatMapValues(Function1<V, TraversableOnce<U>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Pass each value in the key-value pair RDD through a flatMap function without changing the keys; this also retains the original RDD's partitioning.
flatMapValues(Function<V, Iterable<U>>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying a flatmap function to the value of each key-value pairs in 'this' DStream without changing the key.
flatMapValues(Function<V, Iterable<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
flatMapValues(Function<V, Iterable<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
flatMapValues(Function1<V, TraversableOnce<U>>, ClassTag<U>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying a flatmap function to the value of each key-value pairs in 'this' DStream without changing the key.
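A short sketch of flatMapValues on a JavaPairRDD; pairs is an assumed JavaPairRDD<String, String> whose values are comma-separated lists. The keys, and the original partitioning, are preserved:
```java
// assumes: import java.util.Arrays;
//          import org.apache.spark.api.java.JavaPairRDD;
JavaPairRDD<String, String> expanded = pairs.flatMapValues(csv -> Arrays.asList(csv.split(",")));
```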
flatten(Function1<A, GenTraversableOnce<B>>) - Static method in class org.apache.spark.sql.types.StructType
 
FLOAT() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable float type.
FloatParam - Class in org.apache.spark.ml.param
:: DeveloperApi :: Specialized version of Param[Float] for Java.
FloatParam(String, String, String, Function1<Object, Object>) - Constructor for class org.apache.spark.ml.param.FloatParam
 
FloatParam(String, String, String) - Constructor for class org.apache.spark.ml.param.FloatParam
 
FloatParam(Identifiable, String, String, Function1<Object, Object>) - Constructor for class org.apache.spark.ml.param.FloatParam
 
FloatParam(Identifiable, String, String) - Constructor for class org.apache.spark.ml.param.FloatParam
 
FloatType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the FloatType object.
FloatType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing Float values.
floor(Column) - Static method in class org.apache.spark.sql.functions
Computes the floor of the given value.
floor(String) - Static method in class org.apache.spark.sql.functions
Computes the floor of the given column.
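A tiny sketch of functions.floor, assuming a Dataset<Row> df with a double column "price":
```java
// assumes: import static org.apache.spark.sql.functions.col;
//          import static org.apache.spark.sql.functions.floor;
Dataset<Row> withFloor = df.withColumn("price_floor", floor(col("price")));
```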
floor() - Method in class org.apache.spark.sql.types.Decimal
 
floor(Duration) - Method in class org.apache.spark.streaming.Time
 
floor(Duration, Time) - Method in class org.apache.spark.streaming.Time
 
FlumeUtils - Class in org.apache.spark.streaming.flume
 
FlumeUtils() - Constructor for class org.apache.spark.streaming.flume.FlumeUtils
 
flush() - Method in class org.apache.spark.io.SnappyOutputStreamWrapper
 
flush() - Method in class org.apache.spark.serializer.SerializationStream
 
flush() - Method in class org.apache.spark.storage.memory.RedirectableOutputStream
 
flush() - Method in class org.apache.spark.storage.TimeTrackingOutputStream
 
fMeasure(double, double) - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns f-measure for a given label (category)
fMeasure(double) - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns f1-measure for a given label (category)
fMeasure() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Deprecated.
Use accuracy. Since 2.0.0.
fMeasureByThreshold() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
Returns a dataframe with two fields (threshold, F-Measure) representing the F-Measure curve with beta = 1.0.
fMeasureByThreshold(double) - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Returns the (threshold, F-Measure) curve.
fMeasureByThreshold() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Returns the (threshold, F-Measure) curve with beta = 1.0.
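A minimal sketch of reading the F-Measure curve from a binary logistic regression training summary; lrModel is an assumed, already-fitted LogisticRegressionModel:
```java
// assumes: import org.apache.spark.ml.classification.BinaryLogisticRegressionSummary;
//          import org.apache.spark.ml.classification.LogisticRegressionModel;
BinaryLogisticRegressionSummary summary =
    (BinaryLogisticRegressionSummary) lrModel.summary();
Dataset<Row> fByThreshold = summary.fMeasureByThreshold(); // columns: threshold, F-Measure
fByThreshold.show();
```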
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
fold(T, Function2<T, T, T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Aggregate the elements of each partition, and then the results for all the partitions, using a given associative function and a neutral "zero value".
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.api.r.RRDD
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
fold(T, Function2<T, T, T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
fold(T, Function2<T, T, T>) - Method in class org.apache.spark.rdd.RDD
Aggregate the elements of each partition, and then the results for all the partitions, using a given associative function and a neutral "zero value".
fold(A1, Function2<A1, A1, A1>) - Static method in class org.apache.spark.sql.types.StructType
 
foldByKey(V, Partitioner, Function2<V, V, V>) - Method in class org.apache.spark.api.java.JavaPairRDD
Merge the values for each key using an associative function and a neutral "zero value" which may be added to the result an arbitrary number of times, and must not change the result (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication.).
foldByKey(V, int, Function2<V, V, V>) - Method in class org.apache.spark.api.java.JavaPairRDD
Merge the values for each key using an associative function and a neutral "zero value" which may be added to the result an arbitrary number of times, and must not change the result (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication.).
foldByKey(V, Function2<V, V, V>) - Method in class org.apache.spark.api.java.JavaPairRDD
Merge the values for each key using an associative function and a neutral "zero value" which may be added to the result an arbitrary number of times, and must not change the result (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication.).
foldByKey(V, Partitioner, Function2<V, V, V>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Merge the values for each key using an associative function and a neutral "zero value" which may be added to the result an arbitrary number of times, and must not change the result (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication.).
foldByKey(V, int, Function2<V, V, V>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Merge the values for each key using an associative function and a neutral "zero value" which may be added to the result an arbitrary number of times, and must not change the result (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication.).
foldByKey(V, Function2<V, V, V>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Merge the values for each key using an associative function and a neutral "zero value" which may be added to the result an arbitrary number of times, and must not change the result (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication.).
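A short sketch of foldByKey, assuming a JavaPairRDD<String, Integer> named counts; 0 is a safe neutral "zero value" for addition:
```java
// assumes: import org.apache.spark.api.java.JavaPairRDD;
JavaPairRDD<String, Integer> totals = counts.foldByKey(0, (a, b) -> a + b);
```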
foldLeft(B, Function2<B, A, B>) - Static method in class org.apache.spark.sql.types.StructType
 
foldRight(B, Function2<A, B, B>) - Static method in class org.apache.spark.sql.types.StructType
 
forall(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
foreach(VoidFunction<T>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
foreach(VoidFunction<T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
foreach(VoidFunction<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
foreach(VoidFunction<T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Applies a function f to all elements of this RDD.
foreach(Function1<T, BoxedUnit>) - Static method in class org.apache.spark.api.r.RRDD
 
foreach(Function1<T, BoxedUnit>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
foreach(Function1<T, BoxedUnit>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
foreach(Function1<T, BoxedUnit>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
foreach(Function1<T, BoxedUnit>) - Static method in class org.apache.spark.graphx.VertexRDD
 
foreach(Function1<T, BoxedUnit>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
foreach(Function1<T, BoxedUnit>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
foreach(Function1<T, BoxedUnit>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
foreach(Function1<T, BoxedUnit>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
foreach(Function1<T, BoxedUnit>) - Method in class org.apache.spark.rdd.RDD
Applies a function f to all elements of this RDD.
foreach(Function1<T, BoxedUnit>) - Method in class org.apache.spark.sql.Dataset
Applies a function f to all rows.
foreach(ForeachFunction<T>) - Method in class org.apache.spark.sql.Dataset
(Java-specific) Runs func on each element of this Dataset.
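A sketch of Dataset.foreach in local mode; the LongAccumulator is only one way (an assumption here) to observe the side effect back on the driver, since the function itself runs on the executors:

    import org.apache.spark.sql.SparkSession

    object ForeachExample {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().appName("foreach-example").master("local[2]").getOrCreate()
        val counter = spark.sparkContext.longAccumulator("seen")
        // foreach runs on the executors; mutate an accumulator rather than a
        // local variable if the driver needs to see the effect.
        spark.range(0, 100).foreach((_: java.lang.Long) => counter.add(1))
        println(s"rows seen: ${counter.value}")  // 100
        spark.stop()
      }
    }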
foreach(Function1<BaseType, BoxedUnit>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
foreach(Function1<A, U>) - Static method in class org.apache.spark.sql.types.StructType
 
foreachActive(Function2<Object, Object, BoxedUnit>) - Method in class org.apache.spark.ml.linalg.DenseVector
 
foreachActive(Function3<Object, Object, Object, BoxedUnit>) - Method in interface org.apache.spark.ml.linalg.Matrix
Applies a function f to all the active elements of dense and sparse matrix.
foreachActive(Function2<Object, Object, BoxedUnit>) - Method in class org.apache.spark.ml.linalg.SparseVector
 
foreachActive(Function2<Object, Object, BoxedUnit>) - Method in interface org.apache.spark.ml.linalg.Vector
Applies a function f to all the active elements of dense and sparse vector.
foreachActive(Function2<Object, Object, BoxedUnit>) - Method in class org.apache.spark.mllib.linalg.DenseVector
 
foreachActive(Function3<Object, Object, Object, BoxedUnit>) - Method in interface org.apache.spark.mllib.linalg.Matrix
Applies a function f to all the active elements of dense and sparse matrix.
foreachActive(Function2<Object, Object, BoxedUnit>) - Method in class org.apache.spark.mllib.linalg.SparseVector
 
foreachActive(Function2<Object, Object, BoxedUnit>) - Method in interface org.apache.spark.mllib.linalg.Vector
Applies a function f to all the active elements of dense and sparse vector.
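A self-contained foreachActive sketch on an ml.linalg sparse vector (the indices and values are arbitrary); only the stored (active) entries are visited:

    import org.apache.spark.ml.linalg.Vectors

    object ForeachActiveExample {
      def main(args: Array[String]): Unit = {
        // Size-5 sparse vector with non-zeros at indices 1 and 3.
        val v = Vectors.sparse(5, Array(1, 3), Array(10.0, 20.0))
        // The function receives (index, value) for each active element only.
        v.foreachActive { (i, value) =>
          println(s"index $i -> $value")
        }
      }
    }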
foreachAsync(VoidFunction<T>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
foreachAsync(VoidFunction<T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
foreachAsync(VoidFunction<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
foreachAsync(VoidFunction<T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
The asynchronous version of the foreach action, which applies a function f to all the elements of this RDD.
foreachAsync(Function1<T, BoxedUnit>) - Method in class org.apache.spark.rdd.AsyncRDDActions
Applies a function f to all elements of this RDD.
ForeachFunction<T> - Interface in org.apache.spark.api.java.function
Base interface for a function used in Dataset's foreach function.
foreachPartition(VoidFunction<Iterator<T>>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
foreachPartition(VoidFunction<Iterator<T>>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
foreachPartition(VoidFunction<Iterator<T>>) - Static method in class org.apache.spark.api.java.JavaRDD
 
foreachPartition(VoidFunction<Iterator<T>>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Applies a function f to each partition of this RDD.
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Static method in class org.apache.spark.api.r.RRDD
 
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Static method in class org.apache.spark.graphx.VertexRDD
 
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Method in class org.apache.spark.rdd.RDD
Applies a function f to each partition of this RDD.
foreachPartition(Function1<Iterator<T>, BoxedUnit>) - Method in class org.apache.spark.sql.Dataset
Applies a function f to each partition of this Dataset.
foreachPartition(ForeachPartitionFunction<T>) - Method in class org.apache.spark.sql.Dataset
(Java-specific) Runs func on each partition of this Dataset.
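A foreachPartition sketch in local mode; the per-partition setup/teardown comments are hypothetical, illustrating the usual reason to prefer it over foreach (one expensive resource per partition instead of per element):

    import org.apache.spark.sql.SparkSession

    object ForeachPartitionExample {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().appName("foreachPartition-example").master("local[2]").getOrCreate()
        spark.range(0, 10).repartition(2).foreachPartition { (rows: Iterator[java.lang.Long]) =>
          // Hypothetical per-partition setup (e.g. opening a connection) would go here.
          rows.foreach(println)
          // ...and per-partition teardown here.
        }
        spark.stop()
      }
    }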
foreachPartitionAsync(VoidFunction<Iterator<T>>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
foreachPartitionAsync(VoidFunction<Iterator<T>>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
foreachPartitionAsync(VoidFunction<Iterator<T>>) - Static method in class org.apache.spark.api.java.JavaRDD
 
foreachPartitionAsync(VoidFunction<Iterator<T>>) - Method in interface org.apache.spark.api.java.JavaRDDLike
The asynchronous version of the foreachPartition action, which applies a function f to each partition of this RDD.
foreachPartitionAsync(Function1<Iterator<T>, BoxedUnit>) - Method in class org.apache.spark.rdd.AsyncRDDActions
Applies a function f to each partition of this RDD.
ForeachPartitionFunction<T> - Interface in org.apache.spark.api.java.function
Base interface for a function used in Dataset's foreachPartition function.
foreachRDD(VoidFunction<R>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
foreachRDD(VoidFunction2<R, Time>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
foreachRDD(VoidFunction<R>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Apply a function to each RDD in this DStream.
foreachRDD(VoidFunction2<R, Time>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Apply a function to each RDD in this DStream.
foreachRDD(VoidFunction<R>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
foreachRDD(VoidFunction2<R, Time>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
foreachRDD(VoidFunction<R>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
foreachRDD(VoidFunction2<R, Time>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
foreachRDD(VoidFunction<R>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
foreachRDD(VoidFunction2<R, Time>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
foreachRDD(VoidFunction<R>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
foreachRDD(VoidFunction2<R, Time>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
foreachRDD(VoidFunction<R>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
foreachRDD(VoidFunction2<R, Time>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
foreachRDD(Function1<RDD<T>, BoxedUnit>) - Method in class org.apache.spark.streaming.dstream.DStream
Apply a function to each RDD in this DStream.
foreachRDD(Function2<RDD<T>, Time, BoxedUnit>) - Method in class org.apache.spark.streaming.dstream.DStream
Apply a function to each RDD in this DStream.
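A foreachRDD sketch; queueStream is used only to give the example a self-contained input (each queued RDD becomes one batch), and the timeout value is arbitrary:

    import scala.collection.mutable
    import org.apache.spark.SparkConf
    import org.apache.spark.rdd.RDD
    import org.apache.spark.streaming.{Seconds, StreamingContext}

    object ForeachRDDExample {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setAppName("foreachRDD-example").setMaster("local[2]")
        val ssc = new StreamingContext(conf, Seconds(1))
        val queue = mutable.Queue[RDD[Int]](ssc.sparkContext.makeRDD(1 to 10))
        val stream = ssc.queueStream(queue)
        // This block runs on the driver once per batch; RDD actions inside it
        // (count, collect, foreachPartition, ...) execute on the executors.
        stream.foreachRDD { (rdd, time) =>
          println(s"batch at $time had ${rdd.count()} records")
        }
        ssc.start()
        ssc.awaitTerminationOrTimeout(5000)
        ssc.stop()
      }
    }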
foreachUp(Function1<BaseType, BoxedUnit>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
format(String) - Method in class org.apache.spark.sql.DataFrameReader
Specifies the input data source format.
format(String) - Method in class org.apache.spark.sql.DataFrameWriter
Specifies the underlying output data source.
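A round-trip sketch of DataFrameWriter.format and DataFrameReader.format in local mode; the output path is a placeholder:

    import org.apache.spark.sql.SparkSession

    object FormatExample {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().appName("format-example").master("local[2]").getOrCreate()
        import spark.implicits._

        val path = "/tmp/format-example-output"  // placeholder directory
        // format() on the writer names the output data source...
        Seq(("a", 1), ("b", 2)).toDF("key", "value")
          .write.format("parquet").mode("overwrite").save(path)
        // ...and on the reader names the input data source.
        spark.read.format("parquet").load(path).show()
        spark.stop()
      }
    }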
format_number(Column, int) - Static method in class org.apache.spark.sql.functions
Formats numeric column x to a format like '#,###,###.##', rounded to d decimal places, and returns the result as a string column.
format_string(String, Column...) - Static method in class org.apache.spark.sql.functions
Formats the arguments in printf-style and returns the result as a string column.
format_string(String, Seq<Column>) - Static method in class org.apache.spark.sql.functions
Formats the arguments in printf-style and returns the result as a string column.
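A small sketch of format_number and format_string; the column names and data are made up for illustration:

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.{format_number, format_string}

    object FormatFunctionsExample {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().appName("format-functions").master("local[2]").getOrCreate()
        import spark.implicits._

        val df = Seq(("widget", 1234567.891)).toDF("name", "amount")
        df.select(
          format_string("item %s costs %.2f", $"name", $"amount").as("label"),
          format_number($"amount", 2).as("pretty")  // "1,234,567.89"
        ).show(truncate = false)

        spark.stop()
      }
    }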
formatBatchTime(long, long, boolean, TimeZone) - Static method in class org.apache.spark.streaming.ui.UIUtils
If batchInterval is less than 1 second, format batchTime with milliseconds.
formatDate(Date) - Static method in class org.apache.spark.ui.UIUtils
 
formatDate(long) - Static method in class org.apache.spark.ui.UIUtils
 
formatDuration(long) - Static method in class org.apache.spark.ui.UIUtils
 
formatDurationVerbose(long) - Static method in class org.apache.spark.ui.UIUtils
Generate a verbose human-readable string representing a duration such as "5 second 35 ms"
formatNumber(double) - Static method in class org.apache.spark.ui.UIUtils
Generate a human-readable string representing a number (e.g.
formatVersion() - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
formatVersion() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
 
formatVersion() - Method in class org.apache.spark.mllib.classification.SVMModel
 
formatVersion() - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
formatVersion() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
 
formatVersion() - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
 
formatVersion() - Method in class org.apache.spark.mllib.clustering.KMeansModel
 
formatVersion() - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
formatVersion() - Method in class org.apache.spark.mllib.clustering.PowerIterationClusteringModel
 
formatVersion() - Method in class org.apache.spark.mllib.feature.ChiSqSelectorModel
 
formatVersion() - Method in class org.apache.spark.mllib.feature.Word2VecModel
 
formatVersion() - Method in class org.apache.spark.mllib.fpm.FPGrowthModel
 
formatVersion() - Method in class org.apache.spark.mllib.fpm.PrefixSpanModel
 
formatVersion() - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
formatVersion() - Method in class org.apache.spark.mllib.regression.IsotonicRegressionModel
 
formatVersion() - Method in class org.apache.spark.mllib.regression.LassoModel
 
formatVersion() - Method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
formatVersion() - Method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
formatVersion() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
 
formatVersion() - Method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
formatVersion() - Method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
formatVersion() - Method in interface org.apache.spark.mllib.util.Saveable
Current version of model save/load format.
formula() - Method in class org.apache.spark.ml.feature.RFormula
R formula parameter.
FPGrowth - Class in org.apache.spark.mllib.fpm
A parallel FP-growth algorithm to mine frequent itemsets.
FPGrowth() - Constructor for class org.apache.spark.mllib.fpm.FPGrowth
Constructs a default instance with default parameters {minSupport: 0.3, numPartitions: same as the input data}.
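A minimal FPGrowth sketch in local mode, overriding the defaults noted above; the tiny transaction set is made up for illustration:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.mllib.fpm.FPGrowth

    object FPGrowthExample {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("fpgrowth-example").setMaster("local[2]"))
        val transactions = sc.parallelize(Seq(
          Array("a", "b", "c"),
          Array("a", "b"),
          Array("a", "c"),
          Array("b", "c")))
        val model = new FPGrowth()
          .setMinSupport(0.5)     // instead of the 0.3 default
          .setNumPartitions(2)
          .run(transactions)
        model.freqItemsets.collect().foreach { itemset =>
          println(s"${itemset.items.mkString("[", ",", "]")}: ${itemset.freq}")
        }
        sc.stop()
      }
    }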
FPGrowth.FreqItemset<Item> - Class in org.apache.spark.mllib.fpm
Frequent itemset.
FPGrowth.FreqItemset(Object, long) - Constructor for class org.apache.spark.mllib.fpm.FPGrowth.FreqItemset
 
FPGrowthModel<Item> - Class in org.apache.spark.mllib.fpm
Model trained by FPGrowth, which holds frequent itemsets.
FPGrowthModel(RDD<FPGrowth.FreqItemset<Item>>, ClassTag<Item>) - Constructor for class org.apache.spark.mllib.fpm.FPGrowthModel
 
FPGrowthModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.fpm
 
FPGrowthModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.fpm.FPGrowthModel.SaveLoadV1_0$
 
freq() - Method in class org.apache.spark.mllib.fpm.FPGrowth.FreqItemset
 
freq() - Method in class org.apache.spark.mllib.fpm.PrefixSpan.FreqSequence
 
freqItems(String[], double) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Finding frequent items for columns, possibly with false positives.
freqItems(String[]) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Finding frequent items for columns, possibly with false positives.
freqItems(Seq<String>, double) - Method in class org.apache.spark.sql.DataFrameStatFunctions
(Scala-specific) Finding frequent items for columns, possibly with false positives.
freqItems(Seq<String>) - Method in class org.apache.spark.sql.DataFrameStatFunctions
(Scala-specific) Finding frequent items for columns, possibly with false positives.
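A freqItems sketch in local mode; 0.4 is the minimum support, and as the description says the result may contain false positives:

    import org.apache.spark.sql.SparkSession

    object FreqItemsExample {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().appName("freqItems-example").master("local[2]").getOrCreate()
        import spark.implicits._

        val df = Seq(1, 1, 1, 2, 2, 3, 1, 1, 2).toDF("value")
        // Items appearing in at least 40% of the rows (here: 1 and 2).
        df.stat.freqItems(Seq("value"), 0.4).show(truncate = false)

        spark.stop()
      }
    }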
freqItemsets() - Method in class org.apache.spark.mllib.fpm.FPGrowthModel
 
freqSequences() - Method in class org.apache.spark.mllib.fpm.PrefixSpanModel
 
from_unixtime(Column) - Static method in class org.apache.spark.sql.functions
Converts the number of seconds from unix epoch (1970-01-01 00:00:00 UTC) to a string representing the timestamp of that moment in the current system time zone, using the default format (yyyy-MM-dd HH:mm:ss).
from_unixtime(Column, String) - Static method in class org.apache.spark.sql.functions
Converts the number of seconds from unix epoch (1970-01-01 00:00:00 UTC) to a string representing the timestamp of that moment in the current system time zone in the given format.
from_utc_timestamp(Column, String) - Static method in class org.apache.spark.sql.functions
Assumes given timestamp is UTC and converts to given timezone.
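A sketch of from_unixtime and from_utc_timestamp; the epoch values and the PST timezone are arbitrary illustrations:

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.{from_unixtime, from_utc_timestamp}

    object FromUnixtimeExample {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().appName("from_unixtime-example").master("local[2]").getOrCreate()
        import spark.implicits._

        val df = Seq(0L, 1468800000L).toDF("epoch_seconds")
        df.select(
          from_unixtime($"epoch_seconds").as("default_format"),           // yyyy-MM-dd HH:mm:ss
          from_unixtime($"epoch_seconds", "yyyy-MM-dd").as("date_only"),
          from_utc_timestamp(from_unixtime($"epoch_seconds").cast("timestamp"), "PST").as("pst")
        ).show(truncate = false)

        spark.stop()
      }
    }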
fromAttributes(Seq<Attribute>) - Static method in class org.apache.spark.sql.types.StructType
 
fromAvroFlumeEvent(AvroFlumeEvent) - Static method in class org.apache.spark.streaming.flume.SparkFlumeEvent
 
fromCOO(int, int, Iterable<Tuple3<Object, Object, Object>>) - Static method in class org.apache.spark.ml.linalg.SparseMatrix
Generate a SparseMatrix from Coordinate List (COO) format.
fromCOO(int, int, Iterable<Tuple3<Object, Object, Object>>) - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
Generate a SparseMatrix from Coordinate List (COO) format.
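A self-contained fromCOO sketch: (row, column, value) triples for a 3 x 3 matrix, with made-up values:

    import org.apache.spark.ml.linalg.SparseMatrix

    object FromCOOExample {
      def main(args: Array[String]): Unit = {
        val entries = Seq((0, 0, 1.0), (1, 2, 2.0), (2, 1, 3.0))
        val m = SparseMatrix.fromCOO(3, 3, entries)
        println(m.toDense)  // materialize as a DenseMatrix for easy inspection
      }
    }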
fromDecimal(Object) - Static method in class org.apache.spark.sql.types.Decimal
 
fromDStream(DStream<T>, ClassTag<T>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
Convert a Scala DStream to a Java-friendly JavaDStream.
fromEdgePartitions(RDD<Tuple2<Object, EdgePartition<ED, VD>>>, VD, StorageLevel, StorageLevel, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.impl.GraphImpl
Create a graph from EdgePartitions, setting referenced vertices to `defaultVertexAttr`.
fromEdges(RDD<Edge<ED>>, ClassTag<ED>, ClassTag<VD>) - Static method in class org.apache.spark.graphx.EdgeRDD
Creates an EdgeRDD from a set of edges.
fromEdges(RDD<Edge<ED>>, VD, StorageLevel, StorageLevel, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.Graph
Construct a graph from a collection of edges.
fromEdges(EdgeRDD<?>, int, VD, ClassTag<VD>) - Static method in class org.apache.spark.graphx.VertexRDD
Constructs a VertexRDD containing all vertices referred to in edges.
fromEdgeTuples(RDD<Tuple2<Object, Object>>, VD, Option<PartitionStrategy>, StorageLevel, StorageLevel, ClassTag<VD>) - Static method in class org.apache.spark.graphx.Graph
Construct a graph from a collection of edges encoded as vertex id pairs.
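A fromEdgeTuples sketch in local mode; the edge list is made up, and every vertex referenced by the (srcId, dstId) pairs receives the default attribute 1:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.graphx.Graph

    object FromEdgeTuplesExample {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("graph-example").setMaster("local[2]"))
        val edgeTuples = sc.parallelize(Seq((1L, 2L), (2L, 3L), (3L, 1L)))
        val graph = Graph.fromEdgeTuples(edgeTuples, 1)  // 1 is the default vertex attribute
        println(s"vertices = ${graph.vertices.count()}, edges = ${graph.edges.count()}")
        sc.stop()
      }
    }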
fromExistingRDDs(VertexRDD<VD>, EdgeRDD<ED>, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.impl.GraphImpl
Create a graph from a VertexRDD and an EdgeRDD with the same replicated vertex type as the vertices.
fromInputDStream(InputDStream<T>, ClassTag<T>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
Convert a Scala InputDStream to a Java-friendly JavaInputDStream.
fromInputDStream(InputDStream<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
Convert a Scala InputDStream of pairs to a Java-friendly JavaPairInputDStream.
fromJavaDStream(JavaDStream<Tuple2<K, V>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
fromJavaRDD(JavaRDD<Tuple2<K, V>>) - Static method in class org.apache.spark.api.java.JavaPairRDD
Convert a JavaRDD of key-value pairs to JavaPairRDD.
fromJson(String) - Static method in class org.apache.spark.ml.linalg.JsonVectorConverter
Parses the JSON representation of a vector into a Vector.
fromJson(String) - Static method in class org.apache.spark.mllib.linalg.Vectors
Parses the JSON representation of a vector into a Vector.
fromJson(String) - Static method in class org.apache.spark.sql.types.DataType
 
fromJson(String) - Static method in class org.apache.spark.sql.types.Metadata
Creates a Metadata instance from JSON.
fromMesos(Protos.TaskState) - Static method in class org.apache.spark.TaskState
 
fromName(String) - Static method in class org.apache.spark.ml.attribute.AttributeType
Gets the AttributeType object from its name.
fromName(String) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Family$
Gets the Family object from its name.
fromName(String) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Link$
Gets the Link object from its name.
fromNullable(T) - Static method in class org.apache.spark.api.java.Optional
 
fromOffset() - Method in class org.apache.spark.streaming.kafka.OffsetRange
 
fromOld(Node, Map<Object, Object>) - Static method in class org.apache.spark.ml.tree.Node
Create a new Node from the old Node format, recursively creating child nodes as needed.
fromPairDStream(DStream<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
fromPairRDD(RDD<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Static method in class org.apache.spark.mllib.rdd.MLPairRDDFunctions
Implicit conversion from a pair RDD to MLPairRDDFunctions.
fromRDD(RDD<Object>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
fromRDD(RDD<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
fromRDD(RDD<T>, ClassTag<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
fromRDD(RDD<T>, ClassTag<T>) - Static method in class org.apache.spark.mllib.rdd.RDDFunctions
Implicit conversion from an RDD to RDDFunctions.
fromRdd(RDD<?>) - Static method in class org.apache.spark.storage.RDDInfo
 
fromReceiverInputDStream(ReceiverInputDStream<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
Convert a Scala ReceiverInputDStream of pairs to a Java-friendly JavaPairReceiverInputDStream.
fromReceiverInputDStream(ReceiverInputDStream<T>, ClassTag<T>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
Convert a Scala ReceiverInputDStream to a Java-friendly JavaReceiverInputDStream.
fromSparkContext(SparkContext) - Static method in class org.apache.spark.api.java.JavaSparkContext
 
fromStage(Stage, int, Option<Object>, TaskMetrics, Seq<Seq<TaskLocation>>) - Static method in class org.apache.spark.scheduler.StageInfo
Construct a StageInfo from a Stage.
fromString(String) - Static method in enum org.apache.spark.JobExecutionStatus
 
fromString(String) - Static method in class org.apache.spark.mllib.tree.impurity.Impurities
 
fromString(String) - Static method in class org.apache.spark.mllib.tree.loss.Losses
 
fromString(String) - Static method in enum org.apache.spark.status.api.v1.ApplicationStatus
 
fromString(String) - Static method in enum org.apache.spark.status.api.v1.StageStatus
 
fromString(String) - Static method in enum org.apache.spark.status.api.v1.TaskSorting
 
fromString(String) - Static method in class org.apache.spark.storage.StorageLevel
:: DeveloperApi :: Return the StorageLevel object with the specified name.
fromStructField(StructField) - Static method in class org.apache.spark.ml.attribute.AttributeGroup
Creates an attribute group from a StructField instance.
fullOuterJoin(JavaPairRDD<K, W>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Perform a full outer join of this and other.
fullOuterJoin(JavaPairRDD<K, W>) - Method in class org.apache.spark.api.java.JavaPairRDD
Perform a full outer join of this and other.
fullOuterJoin(JavaPairRDD<K, W>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Perform a full outer join of this and other.
fullOuterJoin(RDD<Tuple2<K, W>>, Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
Perform a full outer join of this and other.
fullOuterJoin(RDD<Tuple2<K, W>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Perform a full outer join of this and other.
fullOuterJoin(RDD<Tuple2<K, W>>, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
Perform a full outer join of this and other.
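A fullOuterJoin sketch on pair RDDs in local mode with made-up data; keys present on only one side get None on the other:

    import org.apache.spark.{SparkConf, SparkContext}

    object FullOuterJoinExample {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("fullOuterJoin-example").setMaster("local[2]"))
        val left  = sc.parallelize(Seq(("a", 1), ("b", 2)))
        val right = sc.parallelize(Seq(("b", 20), ("c", 30)))
        left.fullOuterJoin(right).collect().foreach(println)
        // (a,(Some(1),None))  (b,(Some(2),Some(20)))  (c,(None,Some(30)))
        sc.stop()
      }
    }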
fullOuterJoin(JavaPairDStream<K, W>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'full outer join' between RDDs of this DStream and other DStream.
fullOuterJoin(JavaPairDStream<K, W>, int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'full outer join' between RDDs of this DStream and other DStream.
fullOuterJoin(JavaPairDStream<K, W>, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'full outer join' between RDDs of this DStream and other DStream.
fullOuterJoin(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
fullOuterJoin(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
fullOuterJoin(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
fullOuterJoin(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
fullOuterJoin(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
fullOuterJoin(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
fullOuterJoin(DStream<Tuple2<K, W>>, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'full outer join' between RDDs of this DStream and other DStream.
fullOuterJoin(DStream<Tuple2<K, W>>, int, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'full outer join' between RDDs of this DStream and other DStream.
fullOuterJoin(DStream<Tuple2<K, W>>, Partitioner, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'full outer join' between RDDs of this DStream and other DStream.
fullStackTrace() - Method in class org.apache.spark.ExceptionFailure
 
Function<T1,R> - Interface in org.apache.spark.api.java.function
Base interface for functions whose return types do not create special RDDs.
Function - Class in org.apache.spark.sql.catalog
 
Function(String, String, String, boolean) - Constructor for class org.apache.spark.sql.catalog.Function
 
function(Function4<Time, KeyType, Option<ValueType>, State<StateType>, Option<MappedType>>) - Static method in class org.apache.spark.streaming.StateSpec
Create a StateSpec for setting all the specifications of the mapWithState operation on a pair DStream.
function(Function3<KeyType, Option<ValueType>, State<StateType>, MappedType>) - Static method in class org.apache.spark.streaming.StateSpec
Create a StateSpec for setting all the specifications of the mapWithState operation on a pair DStream.
function(Function4<Time, KeyType, Optional<ValueType>, State<StateType>, Optional<MappedType>>) - Static method in class org.apache.spark.streaming.StateSpec
Create a StateSpec for setting all the specifications of the mapWithState operation on a JavaPairDStream.
function(Function3<KeyType, Optional<ValueType>, State<StateType>, MappedType>) - Static method in class org.apache.spark.streaming.StateSpec
Create a StateSpec for setting all the specifications of the mapWithState operation on a JavaPairDStream.
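A mapWithState sketch using StateSpec.function; the socket source on localhost:9999 and the checkpoint directory are placeholders (e.g. feed it with `nc -lk 9999` when trying it out):

    import org.apache.spark.SparkConf
    import org.apache.spark.streaming.{Seconds, State, StateSpec, StreamingContext}

    object MapWithStateExample {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setAppName("mapWithState-example").setMaster("local[2]")
        val ssc = new StreamingContext(conf, Seconds(1))
        ssc.checkpoint("/tmp/mapWithState-checkpoint")  // placeholder; required by mapWithState

        val words = ssc.socketTextStream("localhost", 9999).flatMap(_.split(" ")).map((_, 1))

        // Running count per word: update the State, return this batch's mapped value.
        def trackCount(word: String, one: Option[Int], state: State[Int]): (String, Int) = {
          val newCount = state.getOption.getOrElse(0) + one.getOrElse(0)
          state.update(newCount)
          (word, newCount)
        }

        words.mapWithState(StateSpec.function(trackCount _)).print()
        ssc.start()
        ssc.awaitTermination()
      }
    }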
Function0<R> - Interface in org.apache.spark.api.java.function
A zero-argument function that returns an R.
Function2<T1,T2,R> - Interface in org.apache.spark.api.java.function
A two-argument function that takes arguments of type T1 and T2 and returns an R.
Function3<T1,T2,T3,R> - Interface in org.apache.spark.api.java.function
A three-argument function that takes arguments of type T1, T2 and T3 and returns an R.
Function4<T1,T2,T3,T4,R> - Interface in org.apache.spark.api.java.function
A four-argument function that takes arguments of type T1, T2, T3 and T4 and returns an R.
functions - Class in org.apache.spark.sql
:: Experimental :: Functions available for DataFrame.
functions() - Constructor for class org.apache.spark.sql.functions
 
FutureAction<T> - Interface in org.apache.spark
A future for the result of an action to support cancellation.
futureExecutionContext() - Static method in class org.apache.spark.rdd.AsyncRDDActions
 

G

gain() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData
 
gain() - Method in class org.apache.spark.ml.tree.InternalNode
 
gain() - Method in class org.apache.spark.mllib.tree.model.InformationGainStats
 
gamma1() - Method in class org.apache.spark.graphx.lib.SVDPlusPlus.Conf
 
gamma2() - Method in class org.apache.spark.graphx.lib.SVDPlusPlus.Conf
 
gamma6() - Method in class org.apache.spark.graphx.lib.SVDPlusPlus.Conf
 
gamma7() - Method in class org.apache.spark.graphx.lib.SVDPlusPlus.Conf
 
GammaGenerator - Class in org.apache.spark.mllib.random
:: DeveloperApi :: Generates i.i.d.
GammaGenerator(double, double) - Constructor for class org.apache.spark.mllib.random.GammaGenerator
 
gammaJavaRDD(JavaSparkContext, double, double, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
gammaJavaRDD(JavaSparkContext, double, double, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
gammaJavaRDD(JavaSparkContext, double, double, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
gammaJavaVectorRDD(JavaSparkContext, double, double, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
gammaJavaVectorRDD(JavaSparkContext, double, double, long, int, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
gammaJavaVectorRDD(JavaSparkContext, double, double, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
 
gammaRDD(SparkContext, double, double, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD comprised of i.i.d. samples from the gamma distribution with the input shape and scale.
gammaShape() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
 
gammaShape() - Method in class org.apache.spark.mllib.clustering.LDAModel
Shape parameter for random initialization of variational parameter gamma.
gammaShape() - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
gammaVectorRDD(SparkContext, double, double, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD[Vector] with vectors containing i.i.d. samples drawn from the gamma distribution with the input shape and scale.
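A gammaRDD sketch in local mode; the shape, scale, size, partition count, and seed values are arbitrary illustrations:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.mllib.random.RandomRDDs

    object GammaRDDExample {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("gamma-rdd-example").setMaster("local[2]"))
        // 1000 i.i.d. samples from Gamma(shape = 2.0, scale = 3.0), 2 partitions, seed 42.
        val samples = RandomRDDs.gammaRDD(sc, 2.0, 3.0, 1000L, 2, 42L)
        println(s"sample mean = ${samples.mean()}")  // should be close to shape * scale = 6.0
        sc.stop()
      }
    }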
gaps() - Method in class org.apache.spark.ml.feature.RegexTokenizer
Indicates whether regex splits on gaps (true) or matches tokens (false).
GaussianMixture - Class in org.apache.spark.ml.clustering
:: Experimental :: Gaussian Mixture clustering.
GaussianMixture(String) - Constructor for class org.apache.spark.ml.clustering.GaussianMixture
 
GaussianMixture() - Constructor for class org.apache.spark.ml.clustering.GaussianMixture
 
GaussianMixture - Class in org.apache.spark.mllib.clustering
This class performs expectation maximization for multivariate Gaussian Mixture Models (GMMs).
GaussianMixture() - Constructor for class org.apache.spark.mllib.clustering.GaussianMixture
Constructs a default instance.
GaussianMixtureModel - Class in org.apache.spark.ml.clustering
:: Experimental ::
GaussianMixtureModel - Class in org.apache.spark.mllib.clustering
Multivariate Gaussian Mixture Model (GMM) consisting of k Gaussians, where points are drawn from each Gaussian i=1..k with probability w(i); mu(i) and sigma(i) are the respective mean and covariance for each Gaussian distribution i=1..k.
GaussianMixtureModel(double[], MultivariateGaussian[]) - Constructor for class org.apache.spark.mllib.clustering.GaussianMixtureModel
 
GaussianMixtureSummary - Class in org.apache.spark.ml.clustering
:: Experimental :: Summary of GaussianMixture.
gaussians() - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
gaussians() - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
 
gaussiansDF() - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
Retrieve Gaussian distributions as a DataFrame.
GBTClassificationModel - Class in org.apache.spark.ml.classification
:: Experimental :: Gradient-Boosted Trees (GBTs) model for classification.
GBTClassificationModel(String, DecisionTreeRegressionModel[], double[]) - Constructor for class org.apache.spark.ml.classification.GBTClassificationModel
Construct a GBTClassificationModel
GBTClassifier - Class in org.apache.spark.ml.classification
:: Experimental :: Gradient-Boosted Trees (GBTs) learning algorithm for classification.
GBTClassifier(String) - Constructor for class org.apache.spark.ml.classification.GBTClassifier
 
GBTClassifier() - Constructor for class org.apache.spark.ml.classification.GBTClassifier
 
GBTRegressionModel - Class in org.apache.spark.ml.regression
:: Experimental ::
GBTRegressionModel(String, DecisionTreeRegressionModel[], double[]) - Constructor for class org.apache.spark.ml.regression.GBTRegressionModel
Construct a GBTRegressionModel
GBTRegressor - Class in org.apache.spark.ml.regression
:: Experimental :: Gradient-Boosted Trees (GBTs) learning algorithm for regression.
GBTRegressor(String) - Constructor for class org.apache.spark.ml.regression.GBTRegressor
 
GBTRegressor() - Constructor for class org.apache.spark.ml.regression.GBTRegressor
 
GC_TIME() - Static method in class org.apache.spark.ui.ToolTips
 
gemm(double, Matrix, DenseMatrix, double, DenseMatrix) - Static method in class org.apache.spark.ml.linalg.BLAS
C := alpha * A * B + beta * C
gemm(double, Matrix, DenseMatrix, double, DenseMatrix) - Static method in class org.apache.spark.mllib.linalg.BLAS
C := alpha * A * B + beta * C
gemv(double, Matrix, Vector, double, DenseVector) - Static method in class org.apache.spark.ml.linalg.BLAS
y := alpha * A * x + beta * y
gemv(double, Matrix, Vector, double, DenseVector) - Static method in class org.apache.spark.mllib.linalg.BLAS
y := alpha * A * x + beta * y
GeneralizedLinearAlgorithm<M extends GeneralizedLinearModel> - Class in org.apache.spark.mllib.regression
:: DeveloperApi :: GeneralizedLinearAlgorithm implements methods to train a Generalized Linear Model (GLM).
GeneralizedLinearAlgorithm() - Constructor for class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
 
GeneralizedLinearModel - Class in org.apache.spark.mllib.regression
:: DeveloperApi :: GeneralizedLinearModel (GLM) represents a model trained using GeneralizedLinearAlgorithm.
GeneralizedLinearModel(Vector, double) - Constructor for class org.apache.spark.mllib.regression.GeneralizedLinearModel
 
GeneralizedLinearRegression - Class in org.apache.spark.ml.regression
:: Experimental ::
GeneralizedLinearRegression(String) - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
GeneralizedLinearRegression() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
GeneralizedLinearRegression.Binomial$ - Class in org.apache.spark.ml.regression
Binomial exponential family distribution.
GeneralizedLinearRegression.Binomial$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Binomial$
 
GeneralizedLinearRegression.CLogLog$ - Class in org.apache.spark.ml.regression
 
GeneralizedLinearRegression.CLogLog$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.CLogLog$
 
GeneralizedLinearRegression.Family$ - Class in org.apache.spark.ml.regression
 
GeneralizedLinearRegression.Family$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Family$
 
GeneralizedLinearRegression.Gamma$ - Class in org.apache.spark.ml.regression
Gamma exponential family distribution.
GeneralizedLinearRegression.Gamma$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gamma$
 
GeneralizedLinearRegression.Gaussian$ - Class in org.apache.spark.ml.regression
Gaussian exponential family distribution.
GeneralizedLinearRegression.Gaussian$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gaussian$
 
GeneralizedLinearRegression.Identity$ - Class in org.apache.spark.ml.regression
 
GeneralizedLinearRegression.Identity$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Identity$
 
GeneralizedLinearRegression.Inverse$ - Class in org.apache.spark.ml.regression
 
GeneralizedLinearRegression.Inverse$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Inverse$
 
GeneralizedLinearRegression.Link$ - Class in org.apache.spark.ml.regression
 
GeneralizedLinearRegression.Link$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Link$
 
GeneralizedLinearRegression.Log$ - Class in org.apache.spark.ml.regression
 
GeneralizedLinearRegression.Log$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Log$
 
GeneralizedLinearRegression.Logit$ - Class in org.apache.spark.ml.regression
 
GeneralizedLinearRegression.Logit$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Logit$
 
GeneralizedLinearRegression.Poisson$ - Class in org.apache.spark.ml.regression
Poisson exponential family distribution.
GeneralizedLinearRegression.Poisson$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Poisson$
 
GeneralizedLinearRegression.Probit$ - Class in org.apache.spark.ml.regression
 
GeneralizedLinearRegression.Probit$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Probit$
 
GeneralizedLinearRegression.Sqrt$ - Class in org.apache.spark.ml.regression
 
GeneralizedLinearRegression.Sqrt$() - Constructor for class org.apache.spark.ml.regression.GeneralizedLinearRegression.Sqrt$
 
GeneralizedLinearRegressionModel - Class in org.apache.spark.ml.regression
:: Experimental :: Model produced by GeneralizedLinearRegression.
GeneralizedLinearRegressionSummary - Class in org.apache.spark.ml.regression
:: Experimental :: Summary of GeneralizedLinearRegression model and predictions.
GeneralizedLinearRegressionTrainingSummary - Class in org.apache.spark.ml.regression
:: Experimental :: Summary of GeneralizedLinearRegression fitting and model.
generateAssociationRules(double) - Method in class org.apache.spark.mllib.fpm.FPGrowthModel
Generates association rules for the Items in freqItemsets.
generateInitialWeights(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
generateInitialWeights(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
generateInitialWeights(RDD<LabeledPoint>) - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
Generate the initial weights when the user does not supply them
generateInitialWeights(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
generateInitialWeights(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
generateInitialWeights(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
generateKMeansRDD(SparkContext, int, int, int, double, int) - Static method in class org.apache.spark.mllib.util.KMeansDataGenerator
Generate an RDD containing test data for KMeans.
generateLinearInput(double, double[], int, int, double) - Static method in class org.apache.spark.mllib.util.LinearDataGenerator
For compatibility, when the mean and variance are not specified, the generated data have zero mean and variance (1.0/3.0), since the original output range is [-1, 1] with a uniform distribution, and the variance of a uniform distribution is (b - a)^2 / 12, which gives (1.0/3.0).
generateLinearInput(double, double[], double[], double[], int, int, double) - Static method in class org.apache.spark.mllib.util.LinearDataGenerator
 
generateLinearInput(double, double[], double[], double[], int, int, double, double) - Static method in class org.apache.spark.mllib.util.LinearDataGenerator
 
generateLinearInputAsList(double, double[], int, int, double) - Static method in class org.apache.spark.mllib.util.LinearDataGenerator
Return a Java List of synthetic data randomly generated according to a multicollinear model.
generateLinearRDD(SparkContext, int, int, double, int, double) - Static method in class org.apache.spark.mllib.util.LinearDataGenerator
Generate an RDD containing sample data for Linear Regression models - including Ridge, Lasso, and unregularized variants.
generateLogisticRDD(SparkContext, int, int, double, int, double) - Static method in class org.apache.spark.mllib.util.LogisticRegressionDataGenerator
Generate an RDD containing test data for LogisticRegression.
generateRandomEdges(int, int, int, long) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
generateTreeString(int, Seq<Object>, StringBuilder) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
genericBuilder() - Static method in class org.apache.spark.sql.types.StructType
 
geq(Object) - Method in class org.apache.spark.sql.Column
Greater than or equal to an expression.
get(Object) - Method in class org.apache.spark.api.java.JavaUtils.SerializableMapWrapper
 
get() - Method in class org.apache.spark.api.java.Optional
 
get(String) - Static method in class org.apache.spark.api.r.JVMObjectTracker
 
get() - Method in interface org.apache.spark.FutureAction
Blocks and returns the result of this job.
get(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
get(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
get(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
get(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
get(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.clustering.LDA
 
get(Param<T>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
get(Param<T>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
get(Param<T>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.DCT
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.IDF
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.Interaction
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.NGram
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.PCA
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormula
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
get(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
get(Param<T>) - Method in class org.apache.spark.ml.param.ParamMap
Optionally returns the value associated with a param.
get(Param<T>) - Method in interface org.apache.spark.ml.param.Params
Optionally returns the user-supplied value of a param.
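A short sketch of the optional-value accessors on Params, using LogisticRegression and its maxIter param purely as an example:

    import org.apache.spark.ml.classification.LogisticRegression

    object ParamsGetExample {
      def main(args: Array[String]): Unit = {
        val lr = new LogisticRegression()
        println(lr.get(lr.maxIter))           // None: nothing user-supplied yet
        lr.setMaxIter(50)
        println(lr.get(lr.maxIter))           // Some(50)
        println(lr.getOrDefault(lr.maxIter))  // 50; falls back to the default when unset
      }
    }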
get(Param<T>) - Static method in class org.apache.spark.ml.Pipeline
 
get(Param<T>) - Static method in class org.apache.spark.ml.PipelineModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
get(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
get(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
get(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
get(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
get(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
get(String) - Method in class org.apache.spark.SparkConf
Get a parameter; throws a NoSuchElementException if it's not set
get(String, String) - Method in class org.apache.spark.SparkConf
Get a parameter, falling back to a default if not set
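A SparkConf sketch showing both get variants; the keys and values are ordinary Spark properties chosen for illustration:

    import org.apache.spark.SparkConf

    object SparkConfGetExample {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
          .setAppName("conf-example")
          .set("spark.executor.memory", "2g")

        println(conf.get("spark.executor.memory"))      // "2g"
        println(conf.get("spark.locality.wait", "3s"))  // not set, so the default "3s" is returned
        // conf.get("spark.not.set") would throw NoSuchElementException
      }
    }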
get() - Static method in class org.apache.spark.SparkEnv
Returns the SparkEnv.
get(String) - Static method in class org.apache.spark.SparkFiles
Get the absolute path of a file added through SparkContext.addFile().
get(String) - Method in class org.apache.spark.sql.ContinuousQueryManager
Returns the active query with the given name from this SQLContext, or throws an exception if no such query exists.
get(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i.
get(String) - Method in class org.apache.spark.sql.RuntimeConfig
Returns the value of Spark runtime configuration property for the given key.
get(String, String) - Method in class org.apache.spark.sql.RuntimeConfig
Returns the value of Spark runtime configuration property for the given key.
get(<any>) - Method in class org.apache.spark.sql.RuntimeConfig
Returns the value of Spark runtime configuration property for the given key.
get(<any>) - Method in class org.apache.spark.sql.RuntimeConfig
 
get(<any>, T) - Method in class org.apache.spark.sql.RuntimeConfig
Returns the value of Spark runtime configuration property for the given key.
get() - Method in class org.apache.spark.streaming.State
Get the state if it exists; otherwise throw java.util.NoSuchElementException.
get() - Static method in class org.apache.spark.TaskContext
Return the currently active TaskContext.
get(long) - Static method in class org.apache.spark.util.AccumulatorContext
Returns the AccumulatorV2 registered with the given ID, if any.
get_json_object(Column, String) - Static method in class org.apache.spark.sql.functions
Extracts json object from a json string based on json path specified, and returns json string of the extracted json object.
getAcceptanceResults(RDD<Tuple2<K, V>>, boolean, Map<K, Object>, Option<Map<K, Object>>, long) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
Count the number of items instantly accepted and generate the waitlist for each stratum.
getAcceptsNull() - Static method in class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
getActive() - Static method in class org.apache.spark.streaming.StreamingContext
:: Experimental ::
getActiveJobIds() - Method in class org.apache.spark.api.java.JavaSparkStatusTracker
Returns an array containing the ids of all active jobs.
getActiveJobIds() - Method in class org.apache.spark.SparkStatusTracker
Returns an array containing the ids of all active jobs.
getActiveOrCreate(Function0<StreamingContext>) - Static method in class org.apache.spark.streaming.StreamingContext
:: Experimental ::
getActiveOrCreate(String, Function0<StreamingContext>, Configuration, boolean) - Static method in class org.apache.spark.streaming.StreamingContext
:: Experimental ::
getActiveStageIds() - Method in class org.apache.spark.api.java.JavaSparkStatusTracker
Returns an array containing the ids of all active stages.
getActiveStageIds() - Method in class org.apache.spark.SparkStatusTracker
Returns an array containing the ids of all active stages.
getAlgo() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getAll() - Method in class org.apache.spark.SparkConf
Get all parameters as a list of pairs
getAll() - Method in class org.apache.spark.sql.RuntimeConfig
Returns all properties set in this conf.
getAllConfs() - Method in class org.apache.spark.sql.SQLContext
Return all the configuration properties that have been set (i.e.
getAllPools() - Method in class org.apache.spark.SparkContext
:: DeveloperApi :: Return pools for fair scheduler
getAllPrefLocs(RDD<?>) - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer.PartitionLocations
 
GetAllReceiverInfo - Class in org.apache.spark.streaming.scheduler
 
GetAllReceiverInfo() - Constructor for class org.apache.spark.streaming.scheduler.GetAllReceiverInfo
 
getAlpha() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getAlpha() - Method in class org.apache.spark.mllib.clustering.LDA
Alias for getDocConcentration
getAnyValAs(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i.
getAppId() - Method in interface org.apache.spark.launcher.SparkAppHandle
Returns the application ID, or null if not yet known.
getAppId() - Method in class org.apache.spark.SparkConf
Returns the Spark application id, valid in the Driver after TaskScheduler registration and from the start in the Executor.
getAs(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i.
getAs(String) - Method in interface org.apache.spark.sql.Row
Returns the value of a given fieldName.
getAsymmetricAlpha() - Method in class org.apache.spark.mllib.clustering.LDA
Alias for getAsymmetricDocConcentration
getAsymmetricDocConcentration() - Method in class org.apache.spark.mllib.clustering.LDA
Concentration parameter (commonly named "alpha") for the prior placed on documents' distributions over topics ("theta").
getAttr(String) - Method in class org.apache.spark.ml.attribute.AttributeGroup
Gets an attribute by its name.
getAttr(int) - Method in class org.apache.spark.ml.attribute.AttributeGroup
Gets an attribute by its index.
getAvroSchema() - Method in class org.apache.spark.SparkConf
Gets all the avro schemas in the configuration used in the generic Avro record serializer
getBatchingTimeout(SparkConf) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
How long we will wait for the wrappedLog in the BatchedWriteAheadLog to write the records before we fail the write attempt to unblock receivers.
getBernoulliSamplingFunction(RDD<Tuple2<K, V>>, Map<K, Object>, boolean, long) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
Return the per partition sampling function used for sampling without replacement.
getBeta() - Method in class org.apache.spark.mllib.clustering.LDA
Alias for getTopicConcentration
getBinary() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
getBinary() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
getBinary() - Method in class org.apache.spark.ml.feature.HashingTF
 
getBlock(BlockId) - Method in class org.apache.spark.storage.StorageStatus
Return the given block stored in this block manager in O(1) time.
getBlockSize() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getBoolean(String, boolean) - Method in class org.apache.spark.SparkConf
Get a parameter as a boolean, falling back to a default if not set
getBoolean(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i as a primitive boolean.
getBoolean(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a Boolean.
getBooleanArray(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a Boolean array.
getByte(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i as a primitive byte.
getCachedBlockManagerId(BlockManagerId) - Static method in class org.apache.spark.storage.BlockManagerId
 
getCachedMetadata(String) - Static method in class org.apache.spark.rdd.HadoopRDD
The three methods below are helpers for accessing the local map, a property of the SparkEnv of the local process.
getCacheNodeIds() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getCacheNodeIds() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getCallSite(Function1<String, Object>) - Static method in class org.apache.spark.util.Utils
When called inside a class in the spark package, returns the name of the user code class (outside the spark package) that called into Spark, as well as which Spark method they called.
getCaseSensitive() - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
getCatalystType(int, String, int, MetadataBuilder) - Method in class org.apache.spark.sql.jdbc.AggregatedDialect
 
getCatalystType(int, String, int, MetadataBuilder) - Static method in class org.apache.spark.sql.jdbc.DB2Dialect
 
getCatalystType(int, String, int, MetadataBuilder) - Static method in class org.apache.spark.sql.jdbc.DerbyDialect
 
getCatalystType(int, String, int, MetadataBuilder) - Method in class org.apache.spark.sql.jdbc.JdbcDialect
Get the custom datatype mapping for the given jdbc meta information.
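A hedged sketch of how a custom dialect might override this hook to map a vendor-specific column type; the dialect object, URL prefix, and type name are invented for illustration, and returning None defers to the default mapping:

    import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects}
    import org.apache.spark.sql.types.{DataType, MetadataBuilder, StringType}

    object MyVendorDialect extends JdbcDialect {                   // hypothetical dialect
      override def canHandle(url: String): Boolean = url.startsWith("jdbc:myvendor")
      override def getCatalystType(sqlType: Int, typeName: String, size: Int,
          md: MetadataBuilder): Option[DataType] =
        if (typeName == "TINYTEXT") Some(StringType) else None     // map one vendor type, defer otherwise
    }
    JdbcDialects.registerDialect(MyVendorDialect)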
getCatalystType(int, String, int, MetadataBuilder) - Static method in class org.apache.spark.sql.jdbc.MsSqlServerDialect
 
getCatalystType(int, String, int, MetadataBuilder) - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
getCatalystType(int, String, int, MetadataBuilder) - Static method in class org.apache.spark.sql.jdbc.NoopDialect
 
getCatalystType(int, String, int, MetadataBuilder) - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
getCatalystType(int, String, int, MetadataBuilder) - Static method in class org.apache.spark.sql.jdbc.PostgresDialect
 
getCategoricalFeatures(StructField) - Static method in class org.apache.spark.ml.util.MetadataUtils
Examine a schema to identify categorical (Binary and Nominal) features.
getCategoricalFeaturesInfo() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getCause() - Static method in exception org.apache.spark.sql.AnalysisException
 
getCause() - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
getCensorCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getCensorCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getCheckpointDir() - Method in class org.apache.spark.api.java.JavaSparkContext
 
getCheckpointDir() - Method in class org.apache.spark.SparkContext
 
getCheckpointFile() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
getCheckpointFile() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
getCheckpointFile() - Static method in class org.apache.spark.api.java.JavaRDD
 
getCheckpointFile() - Method in interface org.apache.spark.api.java.JavaRDDLike
Gets the name of the file to which this RDD was checkpointed
getCheckpointFile() - Static method in class org.apache.spark.api.r.RRDD
 
getCheckpointFile() - Static method in class org.apache.spark.graphx.EdgeRDD
 
getCheckpointFile() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
getCheckpointFile() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
getCheckpointFile() - Static method in class org.apache.spark.graphx.VertexRDD
 
getCheckpointFile() - Static method in class org.apache.spark.rdd.HadoopRDD
 
getCheckpointFile() - Static method in class org.apache.spark.rdd.JdbcRDD
 
getCheckpointFile() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
getCheckpointFile() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
getCheckpointFile() - Method in class org.apache.spark.rdd.RDD
Gets the name of the directory to which this RDD was checkpointed.
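A small sketch of the checkpoint workflow this getter belongs to, assuming an existing SparkContext sc and a writable checkpoint directory:

    sc.setCheckpointDir("/tmp/spark-checkpoints")   // assumed local/HDFS path
    val rdd = sc.parallelize(1 to 100)
    rdd.checkpoint()
    rdd.count()                                     // an action materializes the checkpoint
    rdd.getCheckpointFile                           // Some(<checkpoint directory for this RDD>)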
getCheckpointFiles() - Method in class org.apache.spark.graphx.Graph
Gets the names of the files to which this Graph was checkpointed.
getCheckpointFiles() - Method in class org.apache.spark.graphx.impl.GraphImpl
 
getCheckpointFiles() - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
If using checkpointing and LDA.keepLastCheckpoint is set to true, then there may be saved checkpoint files.
getCheckpointInterval() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.clustering.LDA
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getCheckpointInterval() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getCheckpointInterval() - Method in class org.apache.spark.mllib.clustering.LDA
Period (in iterations) between checkpoints.
getCheckpointInterval() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getClassifier() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
getClassifier() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
getCombOp() - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
Returns the function used to combine results returned by seqOp from different partitions.
getConf() - Method in class org.apache.spark.api.java.JavaSparkContext
Return a copy of this JavaSparkContext's configuration.
getConf() - Method in class org.apache.spark.rdd.HadoopRDD
 
getConf() - Method in class org.apache.spark.rdd.NewHadoopRDD
 
getConf() - Method in class org.apache.spark.SparkContext
Return a copy of this SparkContext's configuration.
getConf(String) - Method in class org.apache.spark.sql.SQLContext
Return the value of Spark SQL configuration property for the given key.
getConf(String, String) - Method in class org.apache.spark.sql.SQLContext
Return the value of Spark SQL configuration property for the given key, falling back to the given default if the key is not set.
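For illustration only (assuming an existing sqlContext), the two overloads behave as sketched below; the second key is invented:

    sqlContext.setConf("spark.sql.shuffle.partitions", "8")
    sqlContext.getConf("spark.sql.shuffle.partitions")     // "8"
    sqlContext.getConf("spark.sql.myapp.mode", "local")    // hypothetical key, returns the default "local"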
getConfiguredLocalDirs(SparkConf) - Static method in class org.apache.spark.util.Utils
Return the configured local directories where Spark can write files.
getConnection() - Method in interface org.apache.spark.rdd.JdbcRDD.ConnectionFactory
 
getConsumerOffsetMetadata(String, Set<TopicAndPartition>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
Requires Kafka >= 0.8.1.1.
getConsumerOffsetMetadata(String, Set<TopicAndPartition>, short) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
getConsumerOffsets(String, Set<TopicAndPartition>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
Requires Kafka >= 0.8.1.1.
getConsumerOffsets(String, Set<TopicAndPartition>, short) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
getContextOrSparkClassLoader() - Static method in class org.apache.spark.util.Utils
Get the Context ClassLoader on this thread or, if not present, the ClassLoader that loaded Spark.
getConvergenceTol() - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Return the largest change in log-likelihood at which convergence is considered to have occurred.
getCorrelationFromName(String) - Static method in class org.apache.spark.mllib.stat.correlation.Correlations
 
getCurrentKey() - Method in class org.apache.hadoop.hive.ql.io.orc.SparkOrcNewRecordReader
 
getCurrentUserGroups(SparkConf, String) - Static method in class org.apache.spark.util.Utils
 
getCurrentUserName() - Static method in class org.apache.spark.util.Utils
Returns the current user name.
getCurrentValue() - Method in class org.apache.hadoop.hive.ql.io.orc.SparkOrcNewRecordReader
 
getDate(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i of date type as java.sql.Date.
getDecimal(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i of decimal type as java.math.BigDecimal.
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.LDA
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.DCT
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.IDF
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Interaction
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.NGram
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.PCA
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormula
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getDefault(Param<T>) - Method in interface org.apache.spark.ml.param.Params
Gets the default value of a parameter.
getDefault(Param<T>) - Static method in class org.apache.spark.ml.Pipeline
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.PipelineModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
getDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
getDefaultPropertiesFile(Map<String, String>) - Static method in class org.apache.spark.util.Utils
Return the path of the default Spark properties file.
getDegree() - Method in class org.apache.spark.ml.feature.PolynomialExpansion
 
getDependencies() - Static method in class org.apache.spark.api.r.RRDD
 
getDependencies() - Static method in class org.apache.spark.graphx.EdgeRDD
 
getDependencies() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
getDependencies() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
getDependencies() - Static method in class org.apache.spark.graphx.VertexRDD
 
getDependencies() - Method in class org.apache.spark.rdd.CoGroupedRDD
 
getDependencies() - Static method in class org.apache.spark.rdd.HadoopRDD
 
getDependencies() - Static method in class org.apache.spark.rdd.JdbcRDD
 
getDependencies() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
getDependencies() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
getDependencies() - Method in class org.apache.spark.rdd.RDD
Implemented by subclasses to return how this RDD depends on parent RDDs.
getDependencies() - Method in class org.apache.spark.rdd.ShuffledRDD
 
getDependencies() - Method in class org.apache.spark.rdd.UnionRDD
 
getDeprecatedConfig(String, SparkConf) - Static method in class org.apache.spark.SparkConf
Looks for available deprecated keys for the given config option, and returns the first value available.
getDocConcentration() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getDocConcentration() - Static method in class org.apache.spark.ml.clustering.LDA
 
getDocConcentration() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getDocConcentration() - Method in class org.apache.spark.mllib.clustering.LDA
Concentration parameter (commonly named "alpha") for the prior placed on documents' distributions over topics ("theta").
getDouble(String, double) - Method in class org.apache.spark.SparkConf
Get a parameter as a double, falling back to a default if not set
getDouble(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i as a primitive double.
getDouble(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a Double.
getDoubleArray(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a Double array.
getEarliestLeaderOffsets(Set<TopicAndPartition>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
getElasticNetParam() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getElasticNetParam() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getElasticNetParam() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getElasticNetParam() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getEndTimeEpoch() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
getEpsilon() - Method in class org.apache.spark.mllib.clustering.KMeans
The distance threshold within which we consider centers to have converged.
getEstimator() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
getEstimator() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
getEstimator() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
getEstimator() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
getEstimatorParamMaps() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
getEstimatorParamMaps() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
getEstimatorParamMaps() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
getEstimatorParamMaps() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
getEvaluator() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
getEvaluator() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
getEvaluator() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
getEvaluator() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
getExecutorEnv() - Method in class org.apache.spark.SparkConf
Get all executor environment variables set on this SparkConf
getExecutorInfos() - Method in class org.apache.spark.SparkStatusTracker
Returns information of all known executors, including host, port, cacheSize, numRunningTasks.
getExecutorMemoryStatus() - Method in class org.apache.spark.SparkContext
Return a map from the slave to the max memory available for caching and the remaining memory available for caching.
getExecutorStorageStatus() - Method in class org.apache.spark.SparkContext
:: DeveloperApi :: Return information about blocks stored in all of the slaves
getExternalTmpPath(Path, Configuration) - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
getExtTmpPathRelTo(Path, Configuration) - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
getFamily() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getFamily() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getFeatureIndex() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
getFeatureIndex() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
getFeatureIndicesFromNames(StructField, String[]) - Static method in class org.apache.spark.ml.util.MetadataUtils
Takes a Vector column and a list of feature names, and returns the corresponding list of feature indices in the column, in order.
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getFeaturesCol() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
getFeaturesCol() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getFeaturesCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.clustering.KMeans
 
getFeaturesCol() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.clustering.LDA
 
getFeaturesCol() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
getFeaturesCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.feature.RFormula
 
getFeaturesCol() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getFeaturesCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getFeatureSubsetStrategy() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getFeatureSubsetStrategy() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getFeatureSubsetStrategy() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getFeatureSubsetStrategy() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getField(String) - Method in class org.apache.spark.sql.Column
An expression that gets a field by name in a StructType.
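A brief sketch, assuming a DataFrame df with a struct column named address that contains a city field:

    val cities = df.select(df("address").getField("city").as("city"))
    // with spark implicits imported, the equivalent dot syntax is df.select($"address.city")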
getFilePath(File, String) - Static method in class org.apache.spark.util.Utils
Return the absolute path of a file in the given directory.
getFileReader(String, Option<Configuration>) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
Retrieves an ORC file reader from a given path.
getFileSegmentLocations(String, long, long, Configuration) - Static method in class org.apache.spark.streaming.util.HdfsUtils
Get the locations of the HDFS blocks containing the given file segment.
getFileSystemForPath(Path, Configuration) - Static method in class org.apache.spark.streaming.util.HdfsUtils
 
getFinalStorageLevel() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getFinalValue() - Method in class org.apache.spark.partial.PartialResult
Blocking method to wait for and return the final value.
getFitIntercept() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getFitIntercept() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getFitIntercept() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getFitIntercept() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getFitIntercept() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getFitIntercept() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getFitIntercept() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getFitIntercept() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getFloat(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i as a primitive float.
getFormattedClassName(Object) - Static method in class org.apache.spark.util.Utils
Return the class name of the given object, removing all dollar signs
getFormula() - Method in class org.apache.spark.ml.feature.RFormula
 
getGaps() - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
getGroups(String) - Method in interface org.apache.spark.security.GroupMappingServiceProvider
Get the groups the user belongs to.
getHadoopFileSystem(URI, Configuration) - Static method in class org.apache.spark.util.Utils
Return a Hadoop FileSystem with the scheme encoded in the given path.
getHadoopFileSystem(String, Configuration) - Static method in class org.apache.spark.util.Utils
Return a Hadoop FileSystem with the scheme encoded in the given path.
getHandleInvalid() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
getHandleInvalid() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
getImplicitPrefs() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getImpurity() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getImpurity() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getImpurity() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getImpurity() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getImpurity() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getImpurity() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getImpurity() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getImpurity() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getImpurity() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getImpurity() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getImpurity() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getImpurity() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getImpurity() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getIndices() - Method in class org.apache.spark.ml.feature.VectorSlicer
 
getInitializationMode() - Method in class org.apache.spark.mllib.clustering.KMeans
The initialization algorithm.
getInitializationSteps() - Method in class org.apache.spark.mllib.clustering.KMeans
Number of steps for the k-means|| initialization mode
getInitialModel() - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Return the user-supplied initial GMM, if one was supplied.
getInitialPositionInStream(int) - Method in class org.apache.spark.streaming.kinesis.KinesisUtilsPythonHelper
 
getInitMode() - Static method in class org.apache.spark.ml.clustering.KMeans
 
getInitMode() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getInitSteps() - Static method in class org.apache.spark.ml.clustering.KMeans
 
getInitSteps() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getInputCol() - Static method in class org.apache.spark.ml.feature.Binarizer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
getInputCol() - Static method in class org.apache.spark.ml.feature.DCT
 
getInputCol() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
getInputCol() - Static method in class org.apache.spark.ml.feature.HashingTF
 
getInputCol() - Static method in class org.apache.spark.ml.feature.IDF
 
getInputCol() - Static method in class org.apache.spark.ml.feature.IDFModel
 
getInputCol() - Static method in class org.apache.spark.ml.feature.IndexToString
 
getInputCol() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
getInputCol() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
getInputCol() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
getInputCol() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
getInputCol() - Static method in class org.apache.spark.ml.feature.NGram
 
getInputCol() - Static method in class org.apache.spark.ml.feature.Normalizer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
getInputCol() - Static method in class org.apache.spark.ml.feature.PCA
 
getInputCol() - Static method in class org.apache.spark.ml.feature.PCAModel
 
getInputCol() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
getInputCol() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
getInputCol() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
getInputCol() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
getInputCol() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
getInputCol() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
getInputCol() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
getInputCol() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getInputCol() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getInputCols() - Static method in class org.apache.spark.ml.feature.Interaction
 
getInputCols() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
getInputFileName() - Static method in class org.apache.spark.rdd.InputFileNameHolder
The thread variable for the name of the current file being read.
getInputFormat(JobConf) - Method in class org.apache.spark.rdd.HadoopRDD
 
getInputStream(String, Configuration) - Static method in class org.apache.spark.streaming.util.HdfsUtils
 
getInt(String, int) - Method in class org.apache.spark.SparkConf
Get a parameter as an integer, falling back to a default if not set
getInt(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i as a primitive int.
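The positional Row getters listed throughout this index share the same pattern; a tiny sketch:

    import org.apache.spark.sql.Row

    val row = Row(42, 3.14, "spark")
    row.getInt(0)      // 42
    row.getDouble(1)   // 3.14
    row.getString(2)   // "spark"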
getIntermediateStorageLevel() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getInverse() - Method in class org.apache.spark.ml.feature.DCT
 
getIsotonic() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
getIsotonic() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
getItem(Object) - Method in class org.apache.spark.sql.Column
An expression that gets an item at position ordinal out of an array, or gets a value by the given key in a MapType.
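A short sketch, assuming a DataFrame df with an array column tags and a map column attrs:

    df.select(
      df("tags").getItem(0).as("firstTag"),       // element at ordinal 0
      df("attrs").getItem("color").as("color"))   // value for key "color"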
getItemCol() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getItemCol() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
getIteratorSize(Iterator<T>) - Static method in class org.apache.spark.util.Utils
Counts the number of elements of an iterator using a while loop rather than calling TraversableOnce.size() because it uses a for loop, which is slightly slower in the current version of Scala.
getJavaMap(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i of array type as a Map.
getJavaSparkContext(SQLContext) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
getJDBCType(DataType) - Method in class org.apache.spark.sql.jdbc.AggregatedDialect
 
getJDBCType(DataType) - Static method in class org.apache.spark.sql.jdbc.DB2Dialect
 
getJDBCType(DataType) - Static method in class org.apache.spark.sql.jdbc.DerbyDialect
 
getJDBCType(DataType) - Method in class org.apache.spark.sql.jdbc.JdbcDialect
Retrieve the jdbc / sql type for a given datatype.
getJDBCType(DataType) - Static method in class org.apache.spark.sql.jdbc.MsSqlServerDialect
 
getJDBCType(DataType) - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
getJDBCType(DataType) - Static method in class org.apache.spark.sql.jdbc.NoopDialect
 
getJDBCType(DataType) - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
getJDBCType(DataType) - Static method in class org.apache.spark.sql.jdbc.PostgresDialect
 
getJobConf() - Method in class org.apache.spark.rdd.HadoopRDD
 
getJobIdsForGroup(String) - Method in class org.apache.spark.api.java.JavaSparkStatusTracker
Return a list of all known jobs in a particular job group.
getJobIdsForGroup(String) - Method in class org.apache.spark.SparkStatusTracker
Return a list of all known jobs in a particular job group.
getJobInfo(int) - Method in class org.apache.spark.api.java.JavaSparkStatusTracker
Returns job information, or null if the job info could not be found or was garbage collected.
getJobInfo(int) - Method in class org.apache.spark.SparkStatusTracker
Returns job information, or None if the job info could not be found or was garbage collected.
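A sketch of polling job status through the tracker, assuming an existing SparkContext sc; the job group id is invented:

    sc.setJobGroup("example-group", "status tracker demo")   // hypothetical group id
    sc.parallelize(1 to 1000).count()
    val tracker = sc.statusTracker
    for (jobId <- tracker.getJobIdsForGroup("example-group");
         info  <- tracker.getJobInfo(jobId)) {
      println(s"job $jobId: ${info.status}")
    }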
getK() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
getK() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
getK() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getK() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getK() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getK() - Static method in class org.apache.spark.ml.clustering.KMeans
 
getK() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getK() - Static method in class org.apache.spark.ml.clustering.LDA
 
getK() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getK() - Static method in class org.apache.spark.ml.feature.PCA
 
getK() - Static method in class org.apache.spark.ml.feature.PCAModel
 
getK() - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Gets the desired number of leaf clusters.
getK() - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Return the number of Gaussians in the mixture model
getK() - Method in class org.apache.spark.mllib.clustering.KMeans
Number of clusters to create (k).
getK() - Method in class org.apache.spark.mllib.clustering.LDA
Number of topics to infer, i.e., the number of soft cluster centers.
getKappa() - Method in class org.apache.spark.mllib.clustering.OnlineLDAOptimizer
Learning rate: exponential decay rate
getKeepLastCheckpoint() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getKeepLastCheckpoint() - Static method in class org.apache.spark.ml.clustering.LDA
 
getKeepLastCheckpoint() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getKeepLastCheckpoint() - Method in class org.apache.spark.mllib.clustering.EMLDAOptimizer
If using checkpointing, this indicates whether to keep the last checkpoint (vs clean up).
getLabelCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getLabelCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getLabelCol() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
getLabelCol() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
getLabelCol() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
getLabelCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
getLabelCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
getLabelCol() - Static method in class org.apache.spark.ml.feature.RFormula
 
getLabelCol() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getLabelCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getLabels() - Method in class org.apache.spark.ml.feature.IndexToString
 
getLambda() - Method in class org.apache.spark.mllib.classification.NaiveBayes
Get the smoothing parameter.
getLastUpdatedEpoch() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
getLatestLeaderOffsets(Set<TopicAndPartition>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
getLayers() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getLDAModel(double[]) - Method in interface org.apache.spark.mllib.clustering.LDAOptimizer
 
getLeaderOffsets(Set<TopicAndPartition>, long) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
getLeaderOffsets(Set<TopicAndPartition>, long, int) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
getLearningDecay() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getLearningDecay() - Static method in class org.apache.spark.ml.clustering.LDA
 
getLearningDecay() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getLearningOffset() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getLearningOffset() - Static method in class org.apache.spark.ml.clustering.LDA
 
getLearningOffset() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getLearningRate() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
getLeastGroupHash(String) - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
Sorts and gets the least element of the list associated with key in groupHash. The returned PartitionGroup is the least loaded of all groups that represent the machine "key".
getLink() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getLink() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getLinkPredictionCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getLinkPredictionCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getList(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i of array type as List.
getLocalDir(SparkConf) - Static method in class org.apache.spark.util.Utils
Get the path of a temporary directory.
getLocalizedMessage() - Static method in exception org.apache.spark.sql.AnalysisException
 
getLocalizedMessage() - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
getLocalProperty(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Get a local property set in this thread, or null if it is missing.
getLocalProperty(String) - Method in class org.apache.spark.SparkContext
Get a local property set in this thread, or null if it is missing.
getLocalProperty(String) - Method in class org.apache.spark.TaskContext
Get a local property set upstream in the driver, or null if it is missing.
getLong(String, long) - Method in class org.apache.spark.SparkConf
Get a parameter as a long, falling back to a default if not set
getLong(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i as a primitive long.
getLong(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a Long.
getLongArray(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a Long array.
getLoss() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
getLossType() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getLossType() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getLossType() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getLossType() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getLowerBound(double, long, double) - Static method in class org.apache.spark.util.random.BinomialBounds
Returns a threshold p such that if we conduct n Bernoulli trials with success rate = p, it is very unlikely to have more than fraction * n successes.
getLowerBound(double) - Static method in class org.apache.spark.util.random.PoissonBounds
Returns a lambda such that Pr[X > s] is very small, where X ~ Pois(lambda).
getMap(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i of map type as a Scala Map.
getMap() - Method in class org.apache.spark.sql.types.MetadataBuilder
Returns the immutable version of this map.
getMax() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
getMax() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
getMaxBins() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getMaxBins() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getMaxBins() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getMaxBins() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getMaxBins() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getMaxBins() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getMaxBins() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getMaxBins() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getMaxBins() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getMaxBins() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getMaxBins() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getMaxBins() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getMaxBins() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getMaxCategories() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
getMaxCategories() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
getMaxDepth() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getMaxDepth() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getMaxDepth() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getMaxDepth() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getMaxDepth() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getMaxDepth() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getMaxDepth() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getMaxDepth() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getMaxDepth() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getMaxDepth() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getMaxDepth() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getMaxDepth() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getMaxDepth() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getMaxFailures(SparkConf, boolean) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
getMaxIter() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getMaxIter() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getMaxIter() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getMaxIter() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getMaxIter() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getMaxIter() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
getMaxIter() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
getMaxIter() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getMaxIter() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getMaxIter() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getMaxIter() - Static method in class org.apache.spark.ml.clustering.KMeans
 
getMaxIter() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getMaxIter() - Static method in class org.apache.spark.ml.clustering.LDA
 
getMaxIter() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getMaxIter() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getMaxIter() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getMaxIter() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getMaxIter() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getMaxIter() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getMaxIter() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getMaxIter() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getMaxIter() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getMaxIter() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getMaxIter() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getMaxIter() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getMaxIterations() - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Gets the max number of k-means iterations to split clusters.
getMaxIterations() - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Return the maximum number of iterations allowed
getMaxIterations() - Method in class org.apache.spark.mllib.clustering.KMeans
Maximum number of iterations allowed.
getMaxIterations() - Method in class org.apache.spark.mllib.clustering.LDA
Maximum number of iterations allowed.
getMaxLocalProjDBSize() - Method in class org.apache.spark.mllib.fpm.PrefixSpan
Gets the maximum number of items allowed in a projected database before local processing.
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getMaxMemoryInMB() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getMaxMemoryInMB() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getMaxPatternLength() - Method in class org.apache.spark.mllib.fpm.PrefixSpan
Gets the maximal pattern length (i.e. the length of the longest sequential pattern considered).
getMaxResultSize(SparkConf) - Static method in class org.apache.spark.util.Utils
 
getMessage() - Method in exception org.apache.spark.sql.AnalysisException
 
getMessage() - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
getMetadata(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a Metadata.
getMetadataArray(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a Metadata array.
getMetricName() - Method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
getMetricName() - Method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
getMetricName() - Method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
getMetricsSources(String) - Method in class org.apache.spark.TaskContext
::DeveloperApi:: Returns all metrics sources with the given name which are associated with the instance which runs the task.
getMin() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
getMin() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
getMinCount() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getMinCount() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getMinDF() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
getMinDF() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
getMinDivisibleClusterSize() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
getMinDivisibleClusterSize() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
getMinDivisibleClusterSize() - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Gets the minimum number of points (if >= 1.0) or the minimum proportion of points (if < 1.0) of a divisible cluster.
getMinDocFreq() - Static method in class org.apache.spark.ml.feature.IDF
 
getMinDocFreq() - Static method in class org.apache.spark.ml.feature.IDFModel
 
getMiniBatchFraction() - Method in class org.apache.spark.mllib.clustering.OnlineLDAOptimizer
Mini-batch fraction, which sets the fraction of documents sampled and used in each iteration.
getMinInfoGain() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getMinInfoGain() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getMinInfoGain() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getMinInfoGain() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getMinInfoGain() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getMinInfoGain() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getMinInfoGain() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getMinInfoGain() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getMinInfoGain() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getMinInfoGain() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getMinInfoGain() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getMinInfoGain() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getMinInfoGain() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getMinInstancesPerNode() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getMinInstancesPerNode() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getMinSupport() - Method in class org.apache.spark.mllib.fpm.PrefixSpan
Get the minimal support (i.e. the frequency of occurrence before a pattern is considered frequent).
getMinTF() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
getMinTF() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
getMinTokenLength() - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
getModel() - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getModel() - Method in class org.apache.spark.ml.clustering.LDAModel
Returns the underlying spark.mllib model, which may be local or distributed.
getModel() - Method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getModelType() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getModelType() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getModelType() - Method in class org.apache.spark.mllib.classification.NaiveBayes
Get the model type.
getN() - Method in class org.apache.spark.ml.feature.NGram
 
getNames() - Method in class org.apache.spark.ml.feature.VectorSlicer
 
getNode(int, Node) - Static method in class org.apache.spark.mllib.tree.model.Node
Traces down from a root node to get the node with the given node index.
getNodeNumbered(MutableInt) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
getNonnegative() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getNumBuckets() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
getNumClasses(Dataset<?>, int) - Method in class org.apache.spark.ml.classification.Classifier
Get the number of classes.
getNumClasses(Dataset<?>, int) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getNumClasses(Dataset<?>, int) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getNumClasses(Dataset<?>, int) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getNumClasses(Dataset<?>, int) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getNumClasses(StructField) - Static method in class org.apache.spark.ml.util.MetadataUtils
Examine a schema to identify the number of classes in a label column.
getNumClasses() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getNumClasses$default$2() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getNumClasses$default$2() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getNumClasses$default$2() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getNumClasses$default$2() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getNumFeatures() - Method in class org.apache.spark.ml.feature.HashingTF
 
getNumFeatures() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
getNumFeatures() - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
getNumFeatures() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
The dimension of training features.
getNumFeatures() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
getNumFeatures() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
getNumFeatures() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
getNumFolds() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
getNumFolds() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
getNumItemBlocks() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getNumIterations() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
getNumObjFields() - Method in class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods
 
getNumPartitions() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
getNumPartitions() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
getNumPartitions() - Static method in class org.apache.spark.api.java.JavaRDD
 
getNumPartitions() - Method in interface org.apache.spark.api.java.JavaRDDLike
Return the number of partitions in this RDD.
getNumPartitions() - Static method in class org.apache.spark.api.r.RRDD
 
getNumPartitions() - Static method in class org.apache.spark.graphx.EdgeRDD
 
getNumPartitions() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
getNumPartitions() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
getNumPartitions() - Static method in class org.apache.spark.graphx.VertexRDD
 
getNumPartitions() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getNumPartitions() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getNumPartitions() - Static method in class org.apache.spark.rdd.HadoopRDD
 
getNumPartitions() - Static method in class org.apache.spark.rdd.JdbcRDD
 
getNumPartitions() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
getNumPartitions() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
getNumPartitions() - Method in class org.apache.spark.rdd.RDD
Returns the number of partitions of this RDD.
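
A minimal sketch of reading the partition count through the Java API; the local master, app name, and input data below are placeholders chosen purely for illustration.

    import java.util.Arrays;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    public class PartitionCount {
      public static void main(String[] args) {
        // Local master and app name are illustrative placeholders.
        JavaSparkContext sc = new JavaSparkContext("local[*]", "PartitionCount");
        // Request four partitions explicitly when parallelizing.
        JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4), 4);
        int numPartitions = rdd.getNumPartitions();  // 4 in this example
        sc.stop();
      }
    }
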
getNumTopFeatures() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
getNumTopFeatures() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
getNumTrees() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getNumTrees() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getNumTrees() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getNumTrees() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getNumTrees() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getNumTrees() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getNumUserBlocks() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getNumValues() - Method in class org.apache.spark.ml.attribute.NominalAttribute
Get the number of values, either from numValues or from values.
getObject(String) - Static method in class org.apache.spark.api.r.JVMObjectTracker
 
getObjectInspector() - Method in class org.apache.hadoop.hive.ql.io.orc.SparkOrcNewRecordReader
 
getObjectInspector(String, Option<Configuration>) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
getObjFieldValues(Object, Object[]) - Method in class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods
 
getOldDataset(Dataset<?>, String) - Static method in class org.apache.spark.ml.clustering.LDA
Get the dataset for spark.mllib LDA.
getOldDocConcentration() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getOldDocConcentration() - Static method in class org.apache.spark.ml.clustering.LDA
 
getOldDocConcentration() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getOldTopicConcentration() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getOldTopicConcentration() - Static method in class org.apache.spark.ml.clustering.LDA
 
getOldTopicConcentration() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getOptimizeDocConcentration() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getOptimizeDocConcentration() - Static method in class org.apache.spark.ml.clustering.LDA
 
getOptimizeDocConcentration() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getOptimizeDocConcentration() - Method in class org.apache.spark.mllib.clustering.OnlineLDAOptimizer
Optimize docConcentration: indicates whether docConcentration (the Dirichlet parameter for the document-topic distribution) will be optimized during training.
getOptimizer() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getOptimizer() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getOptimizer() - Static method in class org.apache.spark.ml.clustering.LDA
 
getOptimizer() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getOptimizer() - Method in class org.apache.spark.mllib.clustering.LDA
:: DeveloperApi :: LDAOptimizer used to perform the actual calculation.
getOption(String) - Method in class org.apache.spark.SparkConf
Get a parameter as an Option
getOption(String) - Method in class org.apache.spark.sql.RuntimeConfig
Returns the value of Spark runtime configuration property for the given key.
getOption() - Method in class org.apache.spark.streaming.State
Get the state as an Option.
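
A minimal sketch of the Option-returning lookup on SparkConf; the configuration keys used here are examples only.

    import org.apache.spark.SparkConf;
    import scala.Option;

    public class OptionLookup {
      public static void main(String[] args) {
        SparkConf conf = new SparkConf().set("spark.app.name", "OptionLookup");
        Option<String> name = conf.getOption("spark.app.name");    // Some("OptionLookup")
        Option<String> missing = conf.getOption("spark.not.set");  // None (key never set)
        String value = missing.isDefined() ? missing.get() : "fallback";
      }
    }
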
getOrCreate(SparkConf) - Static method in class org.apache.spark.SparkContext
This function may be used to get or instantiate a SparkContext and register it as a singleton object.
getOrCreate() - Static method in class org.apache.spark.SparkContext
This function may be used to get or instantiate a SparkContext and register it as a singleton object.
getOrCreate() - Method in class org.apache.spark.sql.SparkSession.Builder
Gets an existing SparkSession or, if there is no existing one, creates a new one based on the options set in this builder.
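
A minimal sketch of the builder pattern behind getOrCreate(); the app name and local master are placeholder values.

    import org.apache.spark.sql.SparkSession;

    public class GetOrCreateExample {
      public static void main(String[] args) {
        // Reuses an existing SparkSession if one is active, otherwise builds a new one.
        SparkSession spark = SparkSession.builder()
            .appName("GetOrCreateExample")  // placeholder application name
            .master("local[*]")             // placeholder master for local testing
            .getOrCreate();
        spark.stop();
      }
    }
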
getOrCreate(SparkContext) - Static method in class org.apache.spark.sql.SQLContext
Get the singleton SQLContext if it exists or create a new one using the given SparkContext.
getOrCreate(String, Function0<JavaStreamingContext>) - Static method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Either recreate a StreamingContext from checkpoint data or create a new StreamingContext.
getOrCreate(String, Function0<JavaStreamingContext>, Configuration) - Static method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Either recreate a StreamingContext from checkpoint data or create a new StreamingContext.
getOrCreate(String, Function0<JavaStreamingContext>, Configuration, boolean) - Static method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Either recreate a StreamingContext from checkpoint data or create a new StreamingContext.
getOrCreate(String, Function0<StreamingContext>, Configuration, boolean) - Static method in class org.apache.spark.streaming.StreamingContext
Either recreate a StreamingContext from checkpoint data or create a new StreamingContext.
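
A minimal sketch of checkpoint-based recovery via JavaStreamingContext.getOrCreate; the checkpoint directory, batch interval, app name, and master are assumptions made only for illustration.

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.function.Function0;
    import org.apache.spark.streaming.Durations;
    import org.apache.spark.streaming.api.java.JavaStreamingContext;

    public class CheckpointRecovery {
      public static void main(String[] args) {
        final String checkpointDir = "/tmp/checkpoint";  // assumed path, for illustration only
        Function0<JavaStreamingContext> factory = () -> {
          SparkConf conf = new SparkConf()
              .setAppName("CheckpointRecovery")          // placeholder app name
              .setMaster("local[2]");                    // placeholder master
          JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1));
          jssc.checkpoint(checkpointDir);
          // Streams would be defined here before the context is returned.
          return jssc;
        };
        // Recreates the context from checkpoint data if present, otherwise calls the factory.
        JavaStreamingContext context = JavaStreamingContext.getOrCreate(checkpointDir, factory);
      }
    }
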
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.LDA
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.DCT
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.IDF
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Interaction
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.NGram
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.PCA
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormula
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getOrDefault(Param<T>) - Method in interface org.apache.spark.ml.param.Params
Gets the value of a param in the embedded param map or its default value.
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.Pipeline
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.PipelineModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
getOrDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
getOrElse(Param<T>, T) - Method in class org.apache.spark.ml.param.ParamMap
Returns the value associated with a param or a default value.
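
A minimal sketch of the two fallback lookups, using LogisticRegression params as the example; the casts reflect how the Scala Param types surface in Java, and the 0.0 fallback value is arbitrary.

    import org.apache.spark.ml.classification.LogisticRegression;
    import org.apache.spark.ml.param.ParamMap;

    public class ParamLookup {
      public static void main(String[] args) {
        LogisticRegression lr = new LogisticRegression();

        // getOrDefault: the value set on the estimator, or the param's default if none was set.
        int maxIter = (Integer) lr.getOrDefault(lr.maxIter());

        // getOrElse on a ParamMap: fall back to a caller-supplied value (0.0 here, arbitrary)
        // when the map has no entry for the param.
        ParamMap map = ParamMap.empty();
        double regParam = (Double) map.getOrElse(lr.regParam(), 0.0);
      }
    }
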
getOutputCol() - Static method in class org.apache.spark.ml.feature.Binarizer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.DCT
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.HashingTF
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.IDF
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.IDFModel
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.IndexToString
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.Interaction
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.NGram
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.Normalizer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.PCA
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.PCAModel
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getOutputCol() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getOutputStream(String, Configuration) - Static method in class org.apache.spark.streaming.util.HdfsUtils
 
getP() - Method in class org.apache.spark.ml.feature.Normalizer
 
getParam(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getParam(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getParam(String) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getParam(String) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getParam(String) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getParam(String) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getParam(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
getParam(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getParam(String) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getParam(String) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getParam(String) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
getParam(String) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
getParam(String) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
getParam(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getParam(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getParam(String) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
getParam(String) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
getParam(String) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getParam(String) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getParam(String) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getParam(String) - Static method in class org.apache.spark.ml.clustering.KMeans
 
getParam(String) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getParam(String) - Static method in class org.apache.spark.ml.clustering.LDA
 
getParam(String) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getParam(String) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
getParam(String) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
getParam(String) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
getParam(String) - Static method in class org.apache.spark.ml.feature.Binarizer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
getParam(String) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
getParam(String) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.DCT
 
getParam(String) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
getParam(String) - Static method in class org.apache.spark.ml.feature.HashingTF
 
getParam(String) - Static method in class org.apache.spark.ml.feature.IDF
 
getParam(String) - Static method in class org.apache.spark.ml.feature.IDFModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.IndexToString
 
getParam(String) - Static method in class org.apache.spark.ml.feature.Interaction
 
getParam(String) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
getParam(String) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
getParam(String) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.NGram
 
getParam(String) - Static method in class org.apache.spark.ml.feature.Normalizer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
getParam(String) - Static method in class org.apache.spark.ml.feature.PCA
 
getParam(String) - Static method in class org.apache.spark.ml.feature.PCAModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
getParam(String) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.RFormula
 
getParam(String) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
getParam(String) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
getParam(String) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
getParam(String) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
getParam(String) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
getParam(String) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
getParam(String) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getParam(String) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getParam(String) - Method in interface org.apache.spark.ml.param.Params
Gets a param by its name.
getParam(String) - Static method in class org.apache.spark.ml.Pipeline
 
getParam(String) - Static method in class org.apache.spark.ml.PipelineModel
 
getParam(String) - Static method in class org.apache.spark.ml.recommendation.ALS
 
getParam(String) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
getParam(String) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getParam(String) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getParam(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getParam(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getParam(String) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getParam(String) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getParam(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getParam(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getParam(String) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
getParam(String) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
getParam(String) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getParam(String) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getParam(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getParam(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getParam(String) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
getParam(String) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
getParam(String) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
getParam(String) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
getParents(int) - Method in class org.apache.spark.NarrowDependency
Get the parent partitions for a child partition.
getParents(int) - Method in class org.apache.spark.OneToOneDependency
 
getParents(int) - Method in class org.apache.spark.RangeDependency
 
getPartition(long, long, int) - Method in class org.apache.spark.graphx.PartitionStrategy.CanonicalRandomVertexCut$
 
getPartition(long, long, int) - Method in class org.apache.spark.graphx.PartitionStrategy.EdgePartition1D$
 
getPartition(long, long, int) - Method in class org.apache.spark.graphx.PartitionStrategy.EdgePartition2D$
 
getPartition(long, long, int) - Method in interface org.apache.spark.graphx.PartitionStrategy
Returns the partition number for a given edge.
getPartition(long, long, int) - Method in class org.apache.spark.graphx.PartitionStrategy.RandomVertexCut$
 
getPartition(Object) - Method in class org.apache.spark.HashPartitioner
 
getPartition(Object) - Method in class org.apache.spark.Partitioner
 
getPartition(Object) - Method in class org.apache.spark.RangePartitioner
 
getPartitionId() - Static method in class org.apache.spark.TaskContext
Returns the partition id of the currently active TaskContext.
getPartitionMetadata(Set<String>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
getPartitions() - Method in class org.apache.spark.api.r.BaseRRDD
 
getPartitions() - Static method in class org.apache.spark.api.r.RRDD
 
getPartitions() - Method in class org.apache.spark.graphx.EdgeRDD
 
getPartitions() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
getPartitions() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
getPartitions() - Method in class org.apache.spark.graphx.VertexRDD
 
getPartitions() - Method in class org.apache.spark.rdd.CoGroupedRDD
 
getPartitions() - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
getPartitions() - Method in class org.apache.spark.rdd.HadoopRDD
 
getPartitions() - Method in class org.apache.spark.rdd.JdbcRDD
 
getPartitions() - Method in class org.apache.spark.rdd.NewHadoopRDD
 
getPartitions() - Method in class org.apache.spark.rdd.PartitionPruningRDD
 
getPartitions() - Method in class org.apache.spark.rdd.RDD
Implemented by subclasses to return the set of partitions in this RDD.
getPartitions() - Method in class org.apache.spark.rdd.ShuffledRDD
 
getPartitions() - Method in class org.apache.spark.rdd.UnionRDD
 
getPartitions(Set<String>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
getPath() - Method in class org.apache.spark.input.PortableDataStream
 
getPattern() - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
getPersistentRDDs() - Method in class org.apache.spark.api.java.JavaSparkContext
Returns a Java map of JavaRDDs that have marked themselves as persistent via a cache() call.
getPersistentRDDs() - Method in class org.apache.spark.SparkContext
Returns an immutable map of RDDs that have marked themselves as persistent via a cache() call.
getPoissonSamplingFunction(RDD<Tuple2<K, V>>, Map<K, Object>, boolean, long, ClassTag<K>, ClassTag<V>) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
Return the per partition sampling function used for sampling with replacement.
getPoolForName(String) - Method in class org.apache.spark.SparkContext
:: DeveloperApi :: Return the pool associated with the given name, if one exists
getPredictionCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getPredictionCol() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
getPredictionCol() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getPredictionCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.clustering.KMeans
 
getPredictionCol() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
getPredictionCol() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
getPredictionCol() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getPredictionCol() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getPredictionCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getPreferredLocations(Partition) - Static method in class org.apache.spark.api.r.RRDD
 
getPreferredLocations(Partition) - Static method in class org.apache.spark.graphx.EdgeRDD
 
getPreferredLocations(Partition) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
getPreferredLocations(Partition) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
getPreferredLocations(Partition) - Static method in class org.apache.spark.graphx.VertexRDD
 
getPreferredLocations(Partition) - Method in class org.apache.spark.rdd.HadoopRDD
 
getPreferredLocations(Partition) - Static method in class org.apache.spark.rdd.JdbcRDD
 
getPreferredLocations(Partition) - Method in class org.apache.spark.rdd.NewHadoopRDD
 
getPreferredLocations(Partition) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
getPreferredLocations(Partition) - Method in class org.apache.spark.rdd.RDD
Optionally overridden by subclasses to specify placement preferences.
getPreferredLocations(Partition) - Method in class org.apache.spark.rdd.ShuffledRDD
 
getPreferredLocations(Partition) - Method in class org.apache.spark.rdd.UnionRDD
 
getProbabilityCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getProbabilityCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getProbabilityCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getProbabilityCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getProbabilityCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getProbabilityCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getProbabilityCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
getProbabilityCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getProbabilityCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getProbabilityCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getProbabilityCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getProcessName() - Static method in class org.apache.spark.util.Utils
Returns the name of this JVM process.
getProgress() - Method in class org.apache.hadoop.hive.ql.io.orc.SparkOrcNewRecordReader
 
getPropertiesFromFile(String) - Static method in class org.apache.spark.util.Utils
Load properties present in the given file.
getQuantileCalculationStrategy() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getQuantileProbabilities() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getQuantileProbabilities() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getQuantilesCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getQuantilesCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getRank() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getRatingCol() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getRawPredictionCol() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
getRddBlockLocations(int, Seq<StorageStatus>) - Static method in class org.apache.spark.storage.StorageUtils
Return a mapping from block ID to its locations for each block that belongs to the given RDD.
getRDDStorageInfo() - Method in class org.apache.spark.SparkContext
:: DeveloperApi :: Return information about what RDDs are cached, whether they are in memory or on disk, how much space they take, etc.
getReceiver() - Method in class org.apache.spark.streaming.dstream.ReceiverInputDStream
Gets the receiver object that will be sent to the worker nodes to receive data.
getRegParam() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getRegParam() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getRegParam() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getRegParam() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getRegParam() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getRegParam() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getRegParam() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getRelativeError() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
getRelevantConstraints(Set<Expression>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
getRollingIntervalSecs(SparkConf, boolean) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
getRootDirectory() - Static method in class org.apache.spark.SparkFiles
Get the root directory that contains files added through SparkContext.addFile().
getRuns() - Method in class org.apache.spark.mllib.clustering.KMeans
This function has no effect since Spark 2.0.0.
getScalingVec() - Method in class org.apache.spark.ml.feature.ElementwiseProduct
 
getSchedulingMode() - Method in class org.apache.spark.SparkContext
Return the current scheduling mode.
getSeed() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getSeed() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getSeed() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getSeed() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getSeed() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getSeed() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getSeed() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getSeed() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
getSeed() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
getSeed() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getSeed() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getSeed() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getSeed() - Static method in class org.apache.spark.ml.clustering.KMeans
 
getSeed() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getSeed() - Static method in class org.apache.spark.ml.clustering.LDA
 
getSeed() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getSeed() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
getSeed() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getSeed() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getSeed() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getSeed() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getSeed() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getSeed() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getSeed() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getSeed() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getSeed() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getSeed() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
getSeed() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
getSeed() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
getSeed() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
getSeed() - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Gets the random seed.
getSeed() - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Return the random seed
getSeed() - Method in class org.apache.spark.mllib.clustering.KMeans
The random seed for cluster initialization.
getSeed() - Method in class org.apache.spark.mllib.clustering.LDA
Random seed for cluster initialization.
getSeq(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i of array type as a Scala Seq.
getSeqOp(boolean, Map<K, Object>, StratifiedSamplingUtils.RandomDataGenerator, Option<Map<K, Object>>) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
Returns the function used by aggregate to collect sampling statistics for each partition.
getShort(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i as a primitive short.
getSizeAsBytes(String) - Method in class org.apache.spark.SparkConf
Get a size parameter as bytes; throws a NoSuchElementException if it's not set.
getSizeAsBytes(String, String) - Method in class org.apache.spark.SparkConf
Get a size parameter as bytes, falling back to a default if not set.
getSizeAsBytes(String, long) - Method in class org.apache.spark.SparkConf
Get a size parameter as bytes, falling back to a default if not set.
getSizeAsGb(String) - Method in class org.apache.spark.SparkConf
Get a size parameter as Gibibytes; throws a NoSuchElementException if it's not set.
getSizeAsGb(String, String) - Method in class org.apache.spark.SparkConf
Get a size parameter as Gibibytes, falling back to a default if not set.
getSizeAsKb(String) - Method in class org.apache.spark.SparkConf
Get a size parameter as Kibibytes; throws a NoSuchElementException if it's not set.
getSizeAsKb(String, String) - Method in class org.apache.spark.SparkConf
Get a size parameter as Kibibytes, falling back to a default if not set.
getSizeAsMb(String) - Method in class org.apache.spark.SparkConf
Get a size parameter as Mebibytes; throws a NoSuchElementException if it's not set.
getSizeAsMb(String, String) - Method in class org.apache.spark.SparkConf
Get a size parameter as Mebibytes, falling back to a default if not set.
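
A minimal sketch of the size-parsing getters; the keys and values are placeholders, and the commented results assume binary units (1g = 1024 MiB).

    import org.apache.spark.SparkConf;

    public class SizeParams {
      public static void main(String[] args) {
        SparkConf conf = new SparkConf().set("spark.executor.memory", "2g");
        long bytes = conf.getSizeAsBytes("spark.executor.memory");       // 2147483648
        long mib = conf.getSizeAsMb("spark.executor.memory");            // 2048
        // Fallback form: the key is not set, so the supplied default string is parsed instead.
        long kib = conf.getSizeAsKb("spark.broadcast.blockSize", "4m");  // 4096
      }
    }
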
getSlotDescs() - Method in class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods
 
getSmoothing() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getSmoothing() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getSolver() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getSolver() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getSolver() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getSolver() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getSparkClassLoader() - Static method in class org.apache.spark.util.Utils
Get the ClassLoader which loaded Spark.
getSparkHome() - Method in class org.apache.spark.api.java.JavaSparkContext
Get Spark's home location from either a value set through the constructor, or the spark.home Java property, or the SPARK_HOME environment variable (in that order of preference).
getSparkOrYarnConfig(SparkConf, String, String) - Static method in class org.apache.spark.util.Utils
Return the value of a config either through the SparkConf or the Hadoop configuration if this is Yarn mode.
getSplit() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.SplitData
 
getSplits() - Method in class org.apache.spark.ml.feature.Bucketizer
 
getSQLDataType(String) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
getStackTrace() - Static method in exception org.apache.spark.sql.AnalysisException
 
getStackTrace() - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
getStageInfo(int) - Method in class org.apache.spark.api.java.JavaSparkStatusTracker
Returns stage information, or null if the stage info could not be found or was garbage collected.
getStageInfo(int) - Method in class org.apache.spark.SparkStatusTracker
Returns stage information, or None if the stage info could not be found or was garbage collected.
getStagePath(String, int, int, String) - Method in class org.apache.spark.ml.Pipeline.SharedReadWrite$
Get path for saving the given stage.
getStages() - Method in class org.apache.spark.ml.Pipeline
 
getStandardization() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getStandardization() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getStandardization() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getStandardization() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getStartTimeEpoch() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
getState() - Method in interface org.apache.spark.launcher.SparkAppHandle
Returns the current application state.
getState() - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
:: DeveloperApi :: Return the current state of the context.
getState() - Method in class org.apache.spark.streaming.StreamingContext
:: DeveloperApi :: Return the current state of the context.
getStatement() - Method in class org.apache.spark.ml.feature.SQLTransformer
 
getStderr(Process, long) - Static method in class org.apache.spark.util.Utils
Return the stderr of a process after waiting for the process to terminate.
getStepSize() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getStepSize() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getStepSize() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getStepSize() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getStepSize() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getStepSize() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getStepSize() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getStopWords() - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
getStorageLevel() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
getStorageLevel() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
getStorageLevel() - Static method in class org.apache.spark.api.java.JavaRDD
 
getStorageLevel() - Method in interface org.apache.spark.api.java.JavaRDDLike
Get the RDD's current storage level, or StorageLevel.NONE if none is set.
getStorageLevel() - Static method in class org.apache.spark.api.r.RRDD
 
getStorageLevel() - Static method in class org.apache.spark.graphx.EdgeRDD
 
getStorageLevel() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
getStorageLevel() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
getStorageLevel() - Static method in class org.apache.spark.graphx.VertexRDD
 
getStorageLevel() - Static method in class org.apache.spark.rdd.HadoopRDD
 
getStorageLevel() - Static method in class org.apache.spark.rdd.JdbcRDD
 
getStorageLevel() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
getStorageLevel() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
getStorageLevel() - Method in class org.apache.spark.rdd.RDD
Get the RDD's current storage level, or StorageLevel.NONE if none is set.
getString(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i as a String object.
getString(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a String.
getStringArray(String) - Method in class org.apache.spark.sql.types.Metadata
Gets a String array.
getStruct(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i of struct type as a Row object.
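A minimal sketch of these positional accessors, assuming a DataFrame df whose first column is a string and whose second column is a struct:

    import org.apache.spark.sql.Row

    val row: Row = df.first()
    val name   = row.getString(0)   // value at position 0 as a String
    val nested = row.getStruct(1)   // value at position 1 as a nested Row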
getSubsamplingRate() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.clustering.LDA
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
getSubsamplingRate() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
getSubsamplingRate() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getSuppressed() - Static method in exception org.apache.spark.sql.AnalysisException
 
getSuppressed() - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
getSystemProperties() - Static method in class org.apache.spark.util.Utils
Returns the system properties map, which is thread-safe to iterate over.
getTableExistsQuery(String) - Static method in class org.apache.spark.sql.jdbc.DB2Dialect
 
getTableExistsQuery(String) - Static method in class org.apache.spark.sql.jdbc.DerbyDialect
 
getTableExistsQuery(String) - Method in class org.apache.spark.sql.jdbc.JdbcDialect
Get the SQL query that should be used to find if the given table exists.
getTableExistsQuery(String) - Static method in class org.apache.spark.sql.jdbc.MsSqlServerDialect
 
getTableExistsQuery(String) - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
getTableExistsQuery(String) - Static method in class org.apache.spark.sql.jdbc.NoopDialect
 
getTableExistsQuery(String) - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
getTableExistsQuery(String) - Static method in class org.apache.spark.sql.jdbc.PostgresDialect
 
getTau0() - Method in class org.apache.spark.mllib.clustering.OnlineLDAOptimizer
A (positive) learning parameter that downweights early iterations.
getThreadDump() - Static method in class org.apache.spark.util.Utils
Return a thread dump of all threads' stacktraces.
getThreshold() - Method in class org.apache.spark.ml.classification.LogisticRegression
 
getThreshold() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getThreshold() - Method in class org.apache.spark.ml.feature.Binarizer
 
getThreshold() - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
Returns the threshold (if any) used for converting raw prediction scores into 0/1 predictions.
getThreshold() - Method in class org.apache.spark.mllib.classification.SVMModel
Returns the threshold (if any) used for converting raw prediction scores into 0/1 predictions.
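A minimal sketch, assuming training: RDD[LabeledPoint] is already prepared:

    import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS

    val model = new LogisticRegressionWithLBFGS().run(training)
    println(model.getThreshold)   // Some(0.5) by default
    model.clearThreshold()        // predict() now returns raw scores instead of 0/1 labels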
getThresholds() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
getThresholds() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
getThresholds() - Method in class org.apache.spark.ml.classification.LogisticRegression
 
getThresholds() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getThresholds() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
getThresholds() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
getThresholds() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
getThresholds() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
getThresholds() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
getTimeAsMs(String) - Method in class org.apache.spark.SparkConf
Get a time parameter as milliseconds; throws a NoSuchElementException if it's not set.
getTimeAsMs(String, String) - Method in class org.apache.spark.SparkConf
Get a time parameter as milliseconds, falling back to a default if not set.
getTimeAsSeconds(String) - Method in class org.apache.spark.SparkConf
Get a time parameter as seconds; throws a NoSuchElementException if it's not set.
getTimeAsSeconds(String, String) - Method in class org.apache.spark.SparkConf
Get a time parameter as seconds, falling back to a default if not set.
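A minimal sketch of the time accessors, using an illustrative key:

    import org.apache.spark.SparkConf

    val conf = new SparkConf().set("spark.network.timeout", "120s")
    val ms  = conf.getTimeAsMs("spark.network.timeout", "60s")      // 120000
    val sec = conf.getTimeAsSeconds("spark.network.timeout", "60s") // 120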
getTimestamp(int) - Method in interface org.apache.spark.sql.Row
Returns the value at position i of timestamp type as java.sql.Timestamp.
GETTING_RESULT_TIME() - Static method in class org.apache.spark.ui.jobs.TaskDetailsClassNames
 
GETTING_RESULT_TIME() - Static method in class org.apache.spark.ui.ToolTips
 
gettingResult() - Method in class org.apache.spark.scheduler.TaskInfo
 
gettingResultTime() - Method in class org.apache.spark.scheduler.TaskInfo
The time when the task started remotely getting the result.
getTol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getTol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getTol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getTol() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
getTol() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
getTol() - Static method in class org.apache.spark.ml.clustering.KMeans
 
getTol() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
getTol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
getTol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
getTol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getTol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getTol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getTol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getToLowercase() - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
getTopicConcentration() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getTopicConcentration() - Static method in class org.apache.spark.ml.clustering.LDA
 
getTopicConcentration() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getTopicConcentration() - Method in class org.apache.spark.mllib.clustering.LDA
Concentration parameter (commonly named "beta" or "eta") for the prior placed on topics' distributions over terms.
getTopicDistributionCol() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
getTopicDistributionCol() - Static method in class org.apache.spark.ml.clustering.LDA
 
getTopicDistributionCol() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
getTrainRatio() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
getTrainRatio() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
getTreeStrategy() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
getUDTFor(String) - Static method in class org.apache.spark.sql.types.UDTRegistration
Returns the Class of UserDefinedType for the name of a given user class.
getUidMap(Params) - Static method in class org.apache.spark.ml.util.MetaAlgorithmReadWrite
Examine the given estimator (which may be a compound estimator) and extract a mapping from UIDs to corresponding Params instances.
getUpperBound(double, long, double) - Static method in class org.apache.spark.util.random.BinomialBounds
Returns a threshold p such that if we conduct n Bernoulli trials with success rate = p, it is very unlikely to have fewer than fraction * n successes.
getUpperBound(double) - Static method in class org.apache.spark.util.random.PoissonBounds
Returns a lambda such that Pr[X < s] is very small, where X ~ Pois(lambda).
getUsedTimeMs(long) - Static method in class org.apache.spark.util.Utils
Return a string describing how much time has passed, in milliseconds.
getUseNodeIdCache() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
getUserCol() - Static method in class org.apache.spark.ml.recommendation.ALS
 
getUserCol() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
getValidationTol() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
getValue() - Method in class org.apache.spark.broadcast.Broadcast
Actually get the broadcasted value.
getValue(int) - Method in class org.apache.spark.ml.attribute.NominalAttribute
Gets a value given its index.
getValuesMap(Seq<String>) - Method in interface org.apache.spark.sql.Row
Returns a Map(name -> value) for the requested fieldNames. For primitive types, if the value is null, it returns the 'zero value' specific to that primitive (e.g. 0 for Int).
getVarianceCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
getVarianceCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
getVectors() - Method in class org.apache.spark.ml.feature.Word2VecModel
Returns a DataFrame with two fields, "word" and "vector", with "word" being a String and the vector being the DenseVector it is mapped to.
getVectors() - Method in class org.apache.spark.mllib.feature.Word2VecModel
Returns a map of words to their vector representations.
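A minimal sketch contrasting the two variants, assuming fitted models mlModel (spark.ml) and mllibModel (spark.mllib):

    mlModel.getVectors.show(5)                            // DataFrame with "word" and "vector" columns
    val v: Array[Float] = mllibModel.getVectors("spark")  // plain map lookup in the mllib model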
getVectorSize() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getVectorSize() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getVocabSize() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
getVocabSize() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
getWeightCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
getWeightCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
getWeightCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
getWeightCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
getWeightCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
getWeightCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
getWeightCol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
getWeightCol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
getWeights() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
getWindowSize() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
getWindowSize() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
getWithMean() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
getWithMean() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
getWithStd() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
getWithStd() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
Gini - Class in org.apache.spark.mllib.tree.impurity
:: Experimental :: Class for calculating the Gini impurity during binary classification.
Gini() - Constructor for class org.apache.spark.mllib.tree.impurity.Gini
 
GLMClassificationModel - Class in org.apache.spark.mllib.classification.impl
Helper class for import/export of GLM classification models.
GLMClassificationModel() - Constructor for class org.apache.spark.mllib.classification.impl.GLMClassificationModel
 
GLMClassificationModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.classification.impl
 
GLMClassificationModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.classification.impl.GLMClassificationModel.SaveLoadV1_0$
 
GLMClassificationModel.SaveLoadV1_0$.Data - Class in org.apache.spark.mllib.classification.impl
Model data for import/export
GLMClassificationModel.SaveLoadV1_0$.Data(Vector, double, Option<Object>) - Constructor for class org.apache.spark.mllib.classification.impl.GLMClassificationModel.SaveLoadV1_0$.Data
 
GLMRegressionModel - Class in org.apache.spark.mllib.regression.impl
Helper methods for import/export of GLM regression models.
GLMRegressionModel() - Constructor for class org.apache.spark.mllib.regression.impl.GLMRegressionModel
 
GLMRegressionModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.regression.impl
 
GLMRegressionModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.regression.impl.GLMRegressionModel.SaveLoadV1_0$
 
GLMRegressionModel.SaveLoadV1_0$.Data - Class in org.apache.spark.mllib.regression.impl
Model data for model import/export
GLMRegressionModel.SaveLoadV1_0$.Data(Vector, double) - Constructor for class org.apache.spark.mllib.regression.impl.GLMRegressionModel.SaveLoadV1_0$.Data
 
glom() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
glom() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
glom() - Static method in class org.apache.spark.api.java.JavaRDD
 
glom() - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an RDD created by coalescing all elements within each partition into an array.
glom() - Static method in class org.apache.spark.api.r.RRDD
 
glom() - Static method in class org.apache.spark.graphx.EdgeRDD
 
glom() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
glom() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
glom() - Static method in class org.apache.spark.graphx.VertexRDD
 
glom() - Static method in class org.apache.spark.rdd.HadoopRDD
 
glom() - Static method in class org.apache.spark.rdd.JdbcRDD
 
glom() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
glom() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
glom() - Method in class org.apache.spark.rdd.RDD
Return an RDD created by coalescing all elements within each partition into an array.
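A minimal sketch, assuming a SparkContext sc:

    val rdd = sc.parallelize(1 to 10, numSlices = 3)
    val byPartition: Array[Array[Int]] = rdd.glom().collect()
    // e.g. Array(Array(1, 2, 3), Array(4, 5, 6), Array(7, 8, 9, 10))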
glom() - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
glom() - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying glom() to each RDD of this DStream.
glom() - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
glom() - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
glom() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
glom() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
glom() - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
glom() - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD is generated by applying glom() to each RDD of this DStream.
goodnessOfFit() - Method in class org.apache.spark.mllib.stat.test.ChiSqTest.NullHypothesis$
 
grad() - Method in class org.apache.spark.mllib.optimization.NNLS.Workspace
 
gradient() - Method in class org.apache.spark.ml.classification.LogisticAggregator
 
gradient() - Method in class org.apache.spark.ml.regression.AFTAggregator
 
gradient() - Method in class org.apache.spark.ml.regression.LeastSquaresAggregator
 
Gradient - Class in org.apache.spark.mllib.optimization
:: DeveloperApi :: Class used to compute the gradient for a loss function, given a single data point.
Gradient() - Constructor for class org.apache.spark.mllib.optimization.Gradient
 
gradient(double, double) - Static method in class org.apache.spark.mllib.tree.loss.AbsoluteError
Method to calculate the gradients for the gradient boosting calculation, for the least absolute error loss.
gradient(double, double) - Static method in class org.apache.spark.mllib.tree.loss.LogLoss
Method to calculate the loss gradients for the gradient boosting calculation for binary classification. The gradient with respect to F(x) is: -4y / (1 + exp(2y F(x))).
gradient(double, double) - Method in interface org.apache.spark.mllib.tree.loss.Loss
Method to calculate the gradients for the gradient boosting calculation.
gradient(double, double) - Static method in class org.apache.spark.mllib.tree.loss.SquaredError
Method to calculate the gradients for the gradient boosting calculation, for the squared error loss.
GradientBoostedTrees - Class in org.apache.spark.ml.tree.impl
 
GradientBoostedTrees() - Constructor for class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
GradientBoostedTrees - Class in org.apache.spark.mllib.tree
A class that implements Stochastic Gradient Boosting for regression and binary classification.
GradientBoostedTrees(BoostingStrategy) - Constructor for class org.apache.spark.mllib.tree.GradientBoostedTrees
 
GradientBoostedTreesModel - Class in org.apache.spark.mllib.tree.model
Represents a gradient boosted trees model.
GradientBoostedTreesModel(Enumeration.Value, DecisionTreeModel[], double[]) - Constructor for class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
GradientDescent - Class in org.apache.spark.mllib.optimization
Class used to solve an optimization problem using Gradient Descent.
Graph<VD,ED> - Class in org.apache.spark.graphx
The Graph abstractly represents a graph with arbitrary objects associated with vertices and edges.
Graph(ClassTag<VD>, ClassTag<ED>) - Constructor for class org.apache.spark.graphx.Graph
 
GraphGenerators - Class in org.apache.spark.graphx.util
A collection of graph generating functions.
GraphGenerators() - Constructor for class org.apache.spark.graphx.util.GraphGenerators
 
GraphImpl<VD,ED> - Class in org.apache.spark.graphx.impl
An implementation of Graph to support computation on graphs.
GraphImpl(VertexRDD<VD>, ReplicatedVertexView<VD, ED>, ClassTag<VD>, ClassTag<ED>) - Constructor for class org.apache.spark.graphx.impl.GraphImpl
 
GraphImpl(ClassTag<VD>, ClassTag<ED>) - Constructor for class org.apache.spark.graphx.impl.GraphImpl
Default constructor is provided to support serialization
GraphLoader - Class in org.apache.spark.graphx
Provides utilities for loading Graphs from files.
GraphLoader() - Constructor for class org.apache.spark.graphx.GraphLoader
 
GraphOps<VD,ED> - Class in org.apache.spark.graphx
Contains additional functionality for Graph.
GraphOps(Graph<VD, ED>, ClassTag<VD>, ClassTag<ED>) - Constructor for class org.apache.spark.graphx.GraphOps
 
graphToGraphOps(Graph<VD, ED>, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.Graph
Implicitly extracts the GraphOps member from a graph.
GraphXUtils - Class in org.apache.spark.graphx
 
GraphXUtils() - Constructor for class org.apache.spark.graphx.GraphXUtils
 
greater(Duration) - Method in class org.apache.spark.streaming.Duration
 
greater(Time) - Method in class org.apache.spark.streaming.Time
 
greaterEq(Duration) - Method in class org.apache.spark.streaming.Duration
 
greaterEq(Time) - Method in class org.apache.spark.streaming.Time
 
GreaterThan - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to a value greater than value.
GreaterThan(String, Object) - Constructor for class org.apache.spark.sql.sources.GreaterThan
 
GreaterThanOrEqual - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to a value greater than or equal to value.
GreaterThanOrEqual(String, Object) - Constructor for class org.apache.spark.sql.sources.GreaterThanOrEqual
 
greatest(Column...) - Static method in class org.apache.spark.sql.functions
Returns the greatest value of the list of values, skipping null values.
greatest(String, String...) - Static method in class org.apache.spark.sql.functions
Returns the greatest value of the list of column names, skipping null values.
greatest(Seq<Column>) - Static method in class org.apache.spark.sql.functions
Returns the greatest value of the list of values, skipping null values.
greatest(String, Seq<String>) - Static method in class org.apache.spark.sql.functions
Returns the greatest value of the list of column names, skipping null values.
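A minimal sketch, assuming a DataFrame df with numeric columns a, b and c:

    import org.apache.spark.sql.functions.{col, greatest}

    df.select(greatest(col("a"), col("b"), col("c")).as("max_abc")).show()
    df.select(greatest("a", "b", "c")).show()   // same thing, by column name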
gridGraph(SparkContext, int, int) - Static method in class org.apache.spark.graphx.util.GraphGenerators
Create a rows x cols grid graph with each vertex connected to its row+1 and col+1 neighbors.
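A minimal sketch, assuming a SparkContext sc is available:

    import org.apache.spark.graphx.util.GraphGenerators

    // A 3 x 4 grid: 12 vertices, each linked to its row+1 and col+1 neighbors.
    val grid = GraphGenerators.gridGraph(sc, rows = 3, cols = 4)
    println(grid.vertices.count())   // 12
    println(grid.edges.count())      // 3*3 + 2*4 = 17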
groupArr() - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
groupBy(Function<T, U>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
groupBy(Function<T, U>, int) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
groupBy(Function<T, U>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
groupBy(Function<T, U>, int) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
groupBy(Function<T, U>) - Static method in class org.apache.spark.api.java.JavaRDD
 
groupBy(Function<T, U>, int) - Static method in class org.apache.spark.api.java.JavaRDD
 
groupBy(Function<T, U>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an RDD of grouped elements.
groupBy(Function<T, U>, int) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an RDD of grouped elements.
groupBy(Function1<T, K>, ClassTag<K>) - Static method in class org.apache.spark.api.r.RRDD
 
groupBy(Function1<T, K>, int, ClassTag<K>) - Static method in class org.apache.spark.api.r.RRDD
 
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Static method in class org.apache.spark.api.r.RRDD
 
groupBy(Function1<T, K>, ClassTag<K>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
groupBy(Function1<T, K>, int, ClassTag<K>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
groupBy(Function1<T, K>, ClassTag<K>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
groupBy(Function1<T, K>, int, ClassTag<K>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
groupBy(Function1<T, K>, ClassTag<K>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
groupBy(Function1<T, K>, int, ClassTag<K>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
groupBy(Function1<T, K>, ClassTag<K>) - Static method in class org.apache.spark.graphx.VertexRDD
 
groupBy(Function1<T, K>, int, ClassTag<K>) - Static method in class org.apache.spark.graphx.VertexRDD
 
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Static method in class org.apache.spark.graphx.VertexRDD
 
groupBy(Function1<T, K>, ClassTag<K>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
groupBy(Function1<T, K>, int, ClassTag<K>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
groupBy(Function1<T, K>, ClassTag<K>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
groupBy(Function1<T, K>, int, ClassTag<K>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
groupBy(Function1<T, K>, ClassTag<K>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
groupBy(Function1<T, K>, int, ClassTag<K>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
groupBy(Function1<T, K>, ClassTag<K>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
groupBy(Function1<T, K>, int, ClassTag<K>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
groupBy(Function1<T, K>, ClassTag<K>) - Method in class org.apache.spark.rdd.RDD
Return an RDD of grouped items.
groupBy(Function1<T, K>, int, ClassTag<K>) - Method in class org.apache.spark.rdd.RDD
Return an RDD of grouped elements.
groupBy(Function1<T, K>, Partitioner, ClassTag<K>, Ordering<K>) - Method in class org.apache.spark.rdd.RDD
Return an RDD of grouped items.
groupBy(Column...) - Method in class org.apache.spark.sql.Dataset
Groups the Dataset using the specified columns, so we can run aggregation on them.
groupBy(String, String...) - Method in class org.apache.spark.sql.Dataset
Groups the Dataset using the specified columns, so that we can run aggregation on them.
groupBy(Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Groups the Dataset using the specified columns, so we can run aggregation on them.
groupBy(String, Seq<String>) - Method in class org.apache.spark.sql.Dataset
Groups the Dataset using the specified columns, so that we can run aggregation on them.
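A minimal sketch of both flavors, assuming a SparkContext sc and a DataFrame df with a dept column:

    val byParity = sc.parallelize(1 to 10).groupBy(_ % 2)   // RDD[(Int, Iterable[Int])]
    val byDept   = df.groupBy("dept").count()               // grouped DataFrame with per-group counts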
groupBy(Function1<A, K>) - Static method in class org.apache.spark.sql.types.StructType
 
groupBy$default$4(Function1<T, K>, Partitioner) - Static method in class org.apache.spark.api.r.RRDD
 
groupBy$default$4(Function1<T, K>, Partitioner) - Static method in class org.apache.spark.graphx.EdgeRDD
 
groupBy$default$4(Function1<T, K>, Partitioner) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
groupBy$default$4(Function1<T, K>, Partitioner) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
groupBy$default$4(Function1<T, K>, Partitioner) - Static method in class org.apache.spark.graphx.VertexRDD
 
groupBy$default$4(Function1<T, K>, Partitioner) - Static method in class org.apache.spark.rdd.HadoopRDD
 
groupBy$default$4(Function1<T, K>, Partitioner) - Static method in class org.apache.spark.rdd.JdbcRDD
 
groupBy$default$4(Function1<T, K>, Partitioner) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
groupBy$default$4(Function1<T, K>, Partitioner) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
groupByKey(Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Group the values for each key in the RDD into a single sequence.
groupByKey(int) - Method in class org.apache.spark.api.java.JavaPairRDD
Group the values for each key in the RDD into a single sequence.
groupByKey() - Method in class org.apache.spark.api.java.JavaPairRDD
Group the values for each key in the RDD into a single sequence.
groupByKey(Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
Group the values for each key in the RDD into a single sequence.
groupByKey(int) - Method in class org.apache.spark.rdd.PairRDDFunctions
Group the values for each key in the RDD into a single sequence.
groupByKey() - Method in class org.apache.spark.rdd.PairRDDFunctions
Group the values for each key in the RDD into a single sequence.
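A minimal sketch, assuming a SparkContext sc; prefer reduceByKey or aggregateByKey when the values can be reduced, since groupByKey shuffles every value:

    val pairs   = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3)))
    val grouped = pairs.groupByKey()      // RDD[(String, Iterable[Int])]
    grouped.mapValues(_.sum).collect()    // Array(("a", 4), ("b", 2)) in some order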
groupByKey(Function1<T, K>, Encoder<K>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Scala-specific) Returns a KeyValueGroupedDataset where the data is grouped by the given key func.
groupByKey(MapFunction<T, K>, Encoder<K>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Java-specific) Returns a KeyValueGroupedDataset where the data is grouped by the given key func.
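A minimal sketch, assuming ds: Dataset[String] and spark.implicits._ in scope for the key Encoder:

    val counts = ds.groupByKey(identity).count()   // Dataset[(String, Long)]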
groupByKey() - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying groupByKey to each RDD.
groupByKey(int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying groupByKey to each RDD.
groupByKey(Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying groupByKey on each RDD of this DStream.
groupByKey() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
groupByKey(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
groupByKey(Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
groupByKey() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
groupByKey(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
groupByKey(Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
groupByKey() - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying groupByKey to each RDD.
groupByKey(int) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying groupByKey to each RDD.
groupByKey(Partitioner) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying groupByKey on each RDD.
groupByKeyAndWindow(Duration) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying groupByKey over a sliding window.
groupByKeyAndWindow(Duration, Duration) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying groupByKey over a sliding window.
groupByKeyAndWindow(Duration, Duration, int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying groupByKey over a sliding window on this DStream.
groupByKeyAndWindow(Duration, Duration, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying groupByKey over a sliding window on this DStream.
groupByKeyAndWindow(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
groupByKeyAndWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
groupByKeyAndWindow(Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
groupByKeyAndWindow(Duration, Duration, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
groupByKeyAndWindow(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
groupByKeyAndWindow(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
groupByKeyAndWindow(Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
groupByKeyAndWindow(Duration, Duration, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
groupByKeyAndWindow(Duration) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying groupByKey over a sliding window.
groupByKeyAndWindow(Duration, Duration) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying groupByKey over a sliding window.
groupByKeyAndWindow(Duration, Duration, int) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying groupByKey over a sliding window on this DStream.
groupByKeyAndWindow(Duration, Duration, Partitioner) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Create a new DStream by applying groupByKey over a sliding window on this DStream.
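A minimal sketch, assuming pairStream: DStream[(String, Int)] in a running StreamingContext:

    import org.apache.spark.streaming.Seconds

    // A 30-second window that slides every 10 seconds.
    val windowed = pairStream.groupByKeyAndWindow(Seconds(30), Seconds(10))
    windowed.mapValues(_.size).print()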
grouped(int) - Static method in class org.apache.spark.sql.types.StructType
 
groupEdges(Function2<ED, ED, ED>) - Method in class org.apache.spark.graphx.Graph
Merges multiple edges between two vertices into a single edge.
groupEdges(Function2<ED, ED, ED>) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
groupHash() - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
grouping(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: indicates whether a specified column in a GROUP BY list is aggregated or not, returns 1 for aggregated or 0 for not aggregated in the result set.
grouping(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: indicates whether a specified column in a GROUP BY list is aggregated or not, returns 1 for aggregated or 0 for not aggregated in the result set.
grouping_id(Seq<Column>) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the level of grouping, equals to
grouping_id(String, Seq<String>) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the level of grouping, equals to
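A minimal sketch, assuming a DataFrame df with dept, role and salary columns:

    import org.apache.spark.sql.functions.{grouping, grouping_id, sum}

    // cube() produces subtotal rows; grouping()/grouping_id() distinguish them from real groups.
    df.cube("dept", "role")
      .agg(sum("salary"), grouping("dept"), grouping_id())
      .show()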
GroupMappingServiceProvider - Interface in org.apache.spark.security
This Spark trait is used for mapping a given userName to a set of groups which it belongs to.
groupWith(JavaPairRDD<K, W>) - Method in class org.apache.spark.api.java.JavaPairRDD
Alias for cogroup.
groupWith(JavaPairRDD<K, W1>, JavaPairRDD<K, W2>) - Method in class org.apache.spark.api.java.JavaPairRDD
Alias for cogroup.
groupWith(JavaPairRDD<K, W1>, JavaPairRDD<K, W2>, JavaPairRDD<K, W3>) - Method in class org.apache.spark.api.java.JavaPairRDD
Alias for cogroup.
groupWith(RDD<Tuple2<K, W>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Alias for cogroup.
groupWith(RDD<Tuple2<K, W1>>, RDD<Tuple2<K, W2>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Alias for cogroup.
groupWith(RDD<Tuple2<K, W1>>, RDD<Tuple2<K, W2>>, RDD<Tuple2<K, W3>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Alias for cogroup.
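A minimal sketch, assuming a SparkContext sc:

    val left  = sc.parallelize(Seq((1, "a"), (2, "b")))
    val right = sc.parallelize(Seq((1, "x"), (3, "y")))
    val cogrouped = left.groupWith(right)   // RDD[(Int, (Iterable[String], Iterable[String]))]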
gt(double) - Static method in class org.apache.spark.ml.param.ParamValidators
Check if value > lowerBound
gt(Object) - Method in class org.apache.spark.sql.Column
Greater than.
gtEq(double) - Static method in class org.apache.spark.ml.param.ParamValidators
Check if value >= lowerBound
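A minimal sketch of attaching such a validator to a param (the uid, name and doc strings are illustrative):

    import org.apache.spark.ml.param.{DoubleParam, ParamValidators}

    // Values set for this param must satisfy value > 0.0.
    val threshold = new DoubleParam("myUid", "threshold", "must be > 0", ParamValidators.gt(0.0))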
guard(Function0<Parsers.Parser<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 

H

hadoopConfiguration() - Method in class org.apache.spark.api.java.JavaSparkContext
Returns the Hadoop configuration used for the Hadoop code (e.g.
hadoopConfiguration() - Method in class org.apache.spark.SparkContext
A default Hadoop Configuration for the Hadoop code (e.g.
hadoopFile(String, Class<F>, Class<K>, Class<V>, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Get an RDD for a Hadoop file with an arbitrary InputFormat.
hadoopFile(String, Class<F>, Class<K>, Class<V>) - Method in class org.apache.spark.api.java.JavaSparkContext
Get an RDD for a Hadoop file with an arbitrary InputFormat.
hadoopFile(String, Class<? extends InputFormat<K, V>>, Class<K>, Class<V>, int) - Method in class org.apache.spark.SparkContext
Get an RDD for a Hadoop file with an arbitrary InputFormat.
hadoopFile(String, int, ClassTag<K>, ClassTag<V>, ClassTag<F>) - Method in class org.apache.spark.SparkContext
Smarter version of hadoopFile() that uses class tags to figure out the classes of keys, values and the InputFormat so that users don't need to pass them directly.
hadoopFile(String, ClassTag<K>, ClassTag<V>, ClassTag<F>) - Method in class org.apache.spark.SparkContext
Smarter version of hadoopFile() that uses class tags to figure out the classes of keys, values and the InputFormat so that users don't need to pass them directly.
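A minimal sketch of the class-tag variant, assuming a SparkContext sc and an illustrative path:

    import org.apache.hadoop.io.{LongWritable, Text}
    import org.apache.hadoop.mapred.TextInputFormat

    val records = sc.hadoopFile[LongWritable, Text, TextInputFormat]("hdfs:///data/input.txt")
    val lines   = records.map(_._2.toString)   // copy out of the reused Writable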
hadoopRDD(JobConf, Class<F>, Class<K>, Class<V>, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Get an RDD for a Hadoop-readable dataset from a Hadoop JobConf giving its InputFormat and any other necessary info (e.g.
hadoopRDD(JobConf, Class<F>, Class<K>, Class<V>) - Method in class org.apache.spark.api.java.JavaSparkContext
Get an RDD for a Hadoop-readable dataset from a Hadoop JobConf giving its InputFormat and any other necessary info (e.g.
HadoopRDD<K,V> - Class in org.apache.spark.rdd
:: DeveloperApi :: An RDD that provides core functionality for reading data stored in Hadoop (e.g., files in HDFS, sources in HBase, or S3), using the older MapReduce API (org.apache.hadoop.mapred).
HadoopRDD(SparkContext, Broadcast<org.apache.spark.util.SerializableConfiguration>, Option<Function1<JobConf, BoxedUnit>>, Class<? extends InputFormat<K, V>>, Class<K>, Class<V>, int) - Constructor for class org.apache.spark.rdd.HadoopRDD
 
HadoopRDD(SparkContext, JobConf, Class<? extends InputFormat<K, V>>, Class<K>, Class<V>, int) - Constructor for class org.apache.spark.rdd.HadoopRDD
 
hadoopRDD(JobConf, Class<? extends InputFormat<K, V>>, Class<K>, Class<V>, int) - Method in class org.apache.spark.SparkContext
Get an RDD for a Hadoop-readable dataset from a Hadoop JobConf given its InputFormat and other necessary info (e.g.
HadoopRDD.HadoopMapPartitionsWithSplitRDD$ - Class in org.apache.spark.rdd
 
HadoopRDD.HadoopMapPartitionsWithSplitRDD$() - Constructor for class org.apache.spark.rdd.HadoopRDD.HadoopMapPartitionsWithSplitRDD$
 
hammingLoss() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns the Hamming loss.
handleInvalid() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
handleInvalid() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
handleWhiteSpace(CharSequence, int) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
hasAttr(String) - Method in class org.apache.spark.ml.attribute.AttributeGroup
Test whether this attribute group contains a specific attribute.
hasBytesSpilled() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.LDA
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.DCT
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.IDF
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Interaction
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.NGram
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.PCA
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormula
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
hasDefault(Param<T>) - Method in interface org.apache.spark.ml.param.Params
Tests whether the input param has a default value set.
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.Pipeline
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.PipelineModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
hasDefault(Param<T>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
hasDefiniteSize() - Static method in class org.apache.spark.sql.types.StructType
 
hash(Column...) - Static method in class org.apache.spark.sql.functions
Calculates the hash code of given columns, and returns the result as an int column.
hash(Seq<Column>) - Static method in class org.apache.spark.sql.functions
Calculates the hash code of given columns, and returns the result as an int column.
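A minimal sketch, assuming a DataFrame df with id and name columns:

    import org.apache.spark.sql.functions.{col, hash}

    // Deterministic int hash computed over both columns for each row.
    df.withColumn("row_hash", hash(col("id"), col("name"))).show()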
hashCode() - Method in class org.apache.spark.api.java.Optional
 
hashCode() - Method in class org.apache.spark.graphx.EdgeDirection
 
hashCode() - Method in class org.apache.spark.HashPartitioner
 
hashCode() - Method in class org.apache.spark.ml.attribute.AttributeGroup
 
hashCode() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
hashCode() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
hashCode() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
hashCode() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
hashCode() - Method in class org.apache.spark.ml.linalg.DenseVector
 
hashCode() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
hashCode() - Method in class org.apache.spark.ml.linalg.SparseVector
 
hashCode() - Method in interface org.apache.spark.ml.linalg.Vector
Returns a hash code value for the vector.
hashCode() - Static method in class org.apache.spark.ml.param.DoubleParam
 
hashCode() - Static method in class org.apache.spark.ml.param.FloatParam
 
hashCode() - Method in class org.apache.spark.ml.param.Param
 
hashCode() - Method in class org.apache.spark.ml.tree.CategoricalSplit
 
hashCode() - Method in class org.apache.spark.ml.tree.ContinuousSplit
 
hashCode() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
hashCode() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
hashCode() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
hashCode() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
hashCode() - Method in interface org.apache.spark.mllib.linalg.Vector
Returns a hash code value for the vector.
hashCode() - Method in class org.apache.spark.mllib.linalg.VectorUDT
 
hashCode() - Method in class org.apache.spark.mllib.tree.model.InformationGainStats
 
hashCode() - Method in class org.apache.spark.mllib.tree.model.Predict
 
hashCode() - Method in class org.apache.spark.partial.BoundedDouble
 
hashCode() - Method in interface org.apache.spark.Partition
 
hashCode() - Method in class org.apache.spark.RangePartitioner
 
hashCode() - Method in class org.apache.spark.scheduler.cluster.ExecutorInfo
 
hashCode() - Method in class org.apache.spark.scheduler.InputFormatInfo
 
hashCode() - Method in class org.apache.spark.scheduler.SplitInfo
 
hashCode() - Method in class org.apache.spark.sql.Column
 
hashCode() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
hashCode() - Method in interface org.apache.spark.sql.Row
 
hashCode() - Method in class org.apache.spark.sql.sources.In
 
hashCode() - Method in class org.apache.spark.sql.types.Decimal
 
hashCode() - Method in class org.apache.spark.sql.types.Metadata
 
hashCode() - Method in class org.apache.spark.sql.types.StructType
 
hashCode() - Method in class org.apache.spark.storage.BlockId
 
hashCode() - Method in class org.apache.spark.storage.BlockManagerId
 
hashCode() - Static method in class org.apache.spark.storage.BroadcastBlockId
 
hashCode() - Static method in class org.apache.spark.storage.RDDBlockId
 
hashCode() - Static method in class org.apache.spark.storage.ShuffleBlockId
 
hashCode() - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
hashCode() - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
hashCode() - Method in class org.apache.spark.storage.StorageLevel
 
hashCode() - Static method in class org.apache.spark.storage.StreamBlockId
 
hashCode() - Static method in class org.apache.spark.storage.TaskResultBlockId
 
hashCode() - Method in class org.apache.spark.streaming.kafka.Broker
 
hashCode() - Method in class org.apache.spark.streaming.kafka.OffsetRange
 
HashingTF - Class in org.apache.spark.ml.feature
:: Experimental :: Maps a sequence of terms to their term frequencies using the hashing trick.
HashingTF(String) - Constructor for class org.apache.spark.ml.feature.HashingTF
 
HashingTF() - Constructor for class org.apache.spark.ml.feature.HashingTF
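A minimal sketch of the spark.ml transformer, assuming tokenized: DataFrame with an array-of-strings column named tokens:

    import org.apache.spark.ml.feature.HashingTF

    val hashingTF = new HashingTF()
      .setInputCol("tokens")
      .setOutputCol("features")
      .setNumFeatures(1 << 18)
    val featurized = hashingTF.transform(tokenized)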
 
HashingTF - Class in org.apache.spark.mllib.feature
Maps a sequence of terms to their term frequencies using the hashing trick.
HashingTF(int) - Constructor for class org.apache.spark.mllib.feature.HashingTF
 
HashingTF() - Constructor for class org.apache.spark.mllib.feature.HashingTF
 
HashPartitioner - Class in org.apache.spark
A Partitioner that implements hash-based partitioning using Java's Object.hashCode.
HashPartitioner(int) - Constructor for class org.apache.spark.HashPartitioner
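A minimal sketch, assuming pairs: RDD[(String, Int)]:

    import org.apache.spark.HashPartitioner

    // Repartition by the hash of the key into 8 partitions.
    val partitioned = pairs.partitionBy(new HashPartitioner(8))
    println(partitioned.partitions.length)   // 8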
 
hasInput() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
hasLabelCol(StructType) - Static method in class org.apache.spark.ml.feature.RFormula
 
hasLabelCol(StructType) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
hasNext() - Method in class org.apache.spark.InterruptibleIterator
 
HasOffsetRanges - Interface in org.apache.spark.streaming.kafka
Represents any object that has a collection of OffsetRanges.
hasOutput() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
hasParam(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
hasParam(String) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
hasParam(String) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
hasParam(String) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
hasParam(String) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
hasParam(String) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
hasParam(String) - Static method in class org.apache.spark.ml.clustering.KMeans
 
hasParam(String) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
hasParam(String) - Static method in class org.apache.spark.ml.clustering.LDA
 
hasParam(String) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
hasParam(String) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
hasParam(String) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
hasParam(String) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.Binarizer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.DCT
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.HashingTF
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.IDF
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.IDFModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.IndexToString
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.Interaction
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.NGram
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.Normalizer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.PCA
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.PCAModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.RFormula
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
hasParam(String) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
hasParam(String) - Method in interface org.apache.spark.ml.param.Params
Tests whether this instance contains a param with a given name.
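For example, hasParam lets callers probe a Params instance for a parameter by name before looking it up; a minimal sketch using LogisticRegression (the queried names are illustrative):

    import org.apache.spark.ml.classification.LogisticRegression

    val lr = new LogisticRegression()
    lr.hasParam("maxIter")            // true: LogisticRegression defines maxIter
    lr.hasParam("notARealParam")      // false
    // Only look a param up by name once hasParam confirms it exists.
    println(lr.explainParam(lr.getParam("maxIter")))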
hasParam(String) - Static method in class org.apache.spark.ml.Pipeline
 
hasParam(String) - Static method in class org.apache.spark.ml.PipelineModel
 
hasParam(String) - Static method in class org.apache.spark.ml.recommendation.ALS
 
hasParam(String) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
hasParam(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
hasParam(String) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
hasParam(String) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
hasParam(String) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
hasParam(String) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
hasParent() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
hasParent() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
hasParent() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
hasParent() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
hasParent() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
hasParent() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
hasParent() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
hasParent() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
hasParent() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
hasParent() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
hasParent() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
hasParent() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
hasParent() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
hasParent() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.IDFModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.PCAModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
hasParent() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
hasParent() - Method in class org.apache.spark.ml.Model
Indicates whether this Model has a corresponding parent.
hasParent() - Static method in class org.apache.spark.ml.PipelineModel
 
hasParent() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
hasParent() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
hasParent() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
hasParent() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
hasParent() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
hasParent() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
hasParent() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
hasParent() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
hasParent() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
hasParent() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
hasQuantilesCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
hasQuantilesCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
hasRootAsShutdownDeleteDir(File) - Static method in class org.apache.spark.util.ShutdownHookManager
 
hasShuffleRead() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
hasShuffleWrite() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
hasShutdownDeleteDir(File) - Static method in class org.apache.spark.util.ShutdownHookManager
 
hasSummary() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
Indicates whether a training summary exists for this model instance.
hasSummary() - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
Returns true if a summary of the model exists.
hasSummary() - Method in class org.apache.spark.ml.clustering.KMeansModel
Returns true if a summary of the model exists.
hasSummary() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
Indicates if summary is available.
hasSummary() - Method in class org.apache.spark.ml.regression.LinearRegressionModel
Indicates whether a training summary exists for this model instance.
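The hasSummary methods above guard access to a training summary that is only attached to the model returned by fit; a minimal sketch with LinearRegression (assuming a DataFrame named training with "label" and "features" columns):

    import org.apache.spark.ml.regression.LinearRegression

    val model = new LinearRegression().setMaxIter(10).fit(training)
    if (model.hasSummary) {
      // The summary is not available on models reloaded from disk.
      println(s"RMSE: ${model.summary.rootMeanSquaredError}")
    }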
hasValue(String) - Method in class org.apache.spark.ml.attribute.NominalAttribute
Tests whether this attribute contains a specific value.
hasWeightCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
hasWeightCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
hasWriteObjectMethod() - Method in class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods
 
hasWriteReplaceMethod() - Method in class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods
 
HdfsUtils - Class in org.apache.spark.streaming.util
 
HdfsUtils() - Constructor for class org.apache.spark.streaming.util.HdfsUtils
 
head(int) - Method in class org.apache.spark.sql.Dataset
Returns the first n rows.
head() - Method in class org.apache.spark.sql.Dataset
Returns the first row.
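Both head variants collect rows to the driver, so they are meant for small results; a minimal sketch (assuming a SparkSession named spark):

    val df = spark.range(100).toDF("id")
    val firstRow  = df.head()    // the first Row
    val firstFive = df.head(5)   // Array[Row] containing the first 5 rows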
head() - Static method in class org.apache.spark.sql.types.StructType
 
headerSparkPage(String, Function0<Seq<Node>>, SparkUITab, Option<Object>, Option<String>, boolean) - Static method in class org.apache.spark.ui.UIUtils
Returns a Spark page with correctly formatted headers.
headOption() - Static method in class org.apache.spark.sql.types.StructType
 
hex(Column) - Static method in class org.apache.spark.sql.functions
Computes hex value of the given column.
high() - Method in class org.apache.spark.partial.BoundedDouble
 
HingeGradient - Class in org.apache.spark.mllib.optimization
:: DeveloperApi :: Compute gradient and loss for a Hinge loss function, as used in SVM binary classification.
HingeGradient() - Constructor for class org.apache.spark.mllib.optimization.HingeGradient
 
histogram(int) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Compute a histogram of the data using bucketCount buckets evenly spaced between the minimum and maximum of the RDD.
histogram(double[]) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Compute a histogram using the provided buckets.
histogram(Double[], boolean) - Method in class org.apache.spark.api.java.JavaDoubleRDD
 
histogram(int) - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Compute a histogram of the data using bucketCount buckets evenly spaced between the minimum and maximum of the RDD.
histogram(double[], boolean) - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Compute a histogram using the provided buckets.
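A minimal sketch of both histogram variants on an RDD of doubles (assuming an existing SparkContext named sc; the values are illustrative):

    val values = sc.parallelize(Seq(1.0, 2.5, 3.0, 7.5, 9.0))

    // Evenly spaced buckets between min and max: returns (bucketBoundaries, counts).
    val (boundaries, counts) = values.histogram(4)

    // Caller-provided bucket boundaries: returns only the counts per bucket.
    val countsInBuckets = values.histogram(Array(0.0, 5.0, 10.0))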
HIVE_EXECUTION_VERSION() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
HIVE_GENERIC_UDF_MACRO_CLS() - Static method in class org.apache.spark.sql.hive.HiveShim
 
HIVE_METASTORE_BARRIER_PREFIXES() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
HIVE_METASTORE_JARS() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
HIVE_METASTORE_SHARED_PREFIXES() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
HIVE_METASTORE_VERSION() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
HIVE_THRIFT_SERVER_ASYNC() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
HiveContext - Class in org.apache.spark.sql.hive
Deprecated.
Use SparkSession.builder.enableHiveSupport instead. Since 2.0.0.
HiveContext(SparkContext) - Constructor for class org.apache.spark.sql.hive.HiveContext
Deprecated.
 
HiveContext(JavaSparkContext) - Constructor for class org.apache.spark.sql.hive.HiveContext
Deprecated.
 
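Since HiveContext is deprecated, the replacement mentioned above is a SparkSession built with Hive support; a minimal sketch (the application name is illustrative):

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("hive-example")
      .enableHiveSupport()        // takes the place of new HiveContext(sc)
      .getOrCreate()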
hiveExecutionVersion() - Static method in class org.apache.spark.sql.hive.HiveUtils
The version of Hive used internally by Spark SQL.
HiveSerDe - Class in org.apache.spark.sql.internal
 
HiveSerDe(Option<String>, Option<String>, Option<String>) - Constructor for class org.apache.spark.sql.internal.HiveSerDe
 
HiveShim - Class in org.apache.spark.sql.hive
 
HiveShim() - Constructor for class org.apache.spark.sql.hive.HiveShim
 
HiveShim.HiveFunctionWrapper$ - Class in org.apache.spark.sql.hive
 
HiveShim.HiveFunctionWrapper$() - Constructor for class org.apache.spark.sql.hive.HiveShim.HiveFunctionWrapper$
 
HiveTableUtil - Class in org.apache.spark.sql.hive
 
HiveTableUtil() - Constructor for class org.apache.spark.sql.hive.HiveTableUtil
 
HiveUtils - Class in org.apache.spark.sql.hive
 
HiveUtils() - Constructor for class org.apache.spark.sql.hive.HiveUtils
 
horzcat(Matrix[]) - Static method in class org.apache.spark.ml.linalg.Matrices
Horizontally concatenate a sequence of matrices.
horzcat(Matrix[]) - Static method in class org.apache.spark.mllib.linalg.Matrices
Horizontally concatenate a sequence of matrices.
host() - Method in class org.apache.spark.scheduler.TaskInfo
 
host() - Method in interface org.apache.spark.SparkExecutorInfo
 
host() - Method in class org.apache.spark.SparkExecutorInfoImpl
 
host() - Method in class org.apache.spark.status.api.v1.TaskData
 
host() - Method in class org.apache.spark.storage.BlockManagerId
 
host() - Method in class org.apache.spark.streaming.kafka.Broker
Broker's hostname
host() - Method in class org.apache.spark.streaming.kafka.KafkaCluster.LeaderOffset
 
hostLocation() - Method in class org.apache.spark.scheduler.SplitInfo
 
hostname() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisteredExecutor
 
hostname() - Method in class org.apache.spark.scheduler.cluster.mesos.Slave
 
hostPort() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
hostPort() - Method in class org.apache.spark.storage.BlockManagerId
 
hostToLocalTaskCount() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RequestExecutors
 
hour(Column) - Static method in class org.apache.spark.sql.functions
Extracts the hours as an integer from a given date/timestamp/string.
hours() - Static method in class org.apache.spark.scheduler.StatsReportListener
 
htmlResponderToServlet(Function1<HttpServletRequest, Seq<Node>>) - Static method in class org.apache.spark.ui.JettyUtils
 
hypot(Column, Column) - Static method in class org.apache.spark.sql.functions
Computes sqrt(a^2 + b^2) without intermediate overflow or underflow.
hypot(Column, String) - Static method in class org.apache.spark.sql.functions
Computes sqrt(a^2 + b^2) without intermediate overflow or underflow.
hypot(String, Column) - Static method in class org.apache.spark.sql.functions
Computes sqrt(a^2 + b^2) without intermediate overflow or underflow.
hypot(String, String) - Static method in class org.apache.spark.sql.functions
Computes sqrt(a^2 + b^2) without intermediate overflow or underflow.
hypot(Column, double) - Static method in class org.apache.spark.sql.functions
Computes sqrt(a^2 + b^2) without intermediate overflow or underflow.
hypot(String, double) - Static method in class org.apache.spark.sql.functions
Computes sqrt(a^2 + b^2) without intermediate overflow or underflow.
hypot(double, Column) - Static method in class org.apache.spark.sql.functions
Computes sqrt(a^2 + b^2) without intermediate overflow or underflow.
hypot(double, String) - Static method in class org.apache.spark.sql.functions
Computes sqrt(a^2 + b^2) without intermediate overflow or underflow.
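A minimal sketch of the Column-based hypot overload (assuming a DataFrame df with numeric columns "x" and "y"):

    import org.apache.spark.sql.functions.hypot

    // Euclidean distance from the origin, computed without intermediate overflow.
    val withDistance = df.withColumn("dist", hypot(df("x"), df("y")))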

I

i() - Method in class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
id() - Method in class org.apache.spark.Accumulable
Deprecated.
 
id() - Static method in class org.apache.spark.Accumulator
Deprecated.
 
id() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
id() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
id() - Static method in class org.apache.spark.api.java.JavaRDD
 
id() - Method in interface org.apache.spark.api.java.JavaRDDLike
A unique ID for this RDD (within its SparkContext).
id() - Static method in class org.apache.spark.api.r.RRDD
 
id() - Method in class org.apache.spark.broadcast.Broadcast
 
id() - Static method in class org.apache.spark.graphx.EdgeRDD
 
id() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
id() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
id() - Static method in class org.apache.spark.graphx.VertexRDD
 
id() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData
 
id() - Method in class org.apache.spark.mllib.clustering.PowerIterationClustering.Assignment
 
id() - Method in class org.apache.spark.mllib.tree.model.Node
 
id() - Static method in class org.apache.spark.rdd.HadoopRDD
 
id() - Static method in class org.apache.spark.rdd.JdbcRDD
 
id() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
id() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
id() - Method in class org.apache.spark.rdd.RDD
A unique ID for this RDD (within its SparkContext).
id() - Method in class org.apache.spark.scheduler.AccumulableInfo
 
id() - Method in class org.apache.spark.scheduler.TaskInfo
 
id() - Method in class org.apache.spark.status.api.v1.AccumulableInfo
 
id() - Method in class org.apache.spark.status.api.v1.ApplicationInfo
 
id() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
id() - Method in class org.apache.spark.status.api.v1.RDDStorageInfo
 
id() - Method in class org.apache.spark.storage.RDDInfo
 
id() - Method in class org.apache.spark.streaming.dstream.InputDStream
This is a unique identifier for the input stream.
id() - Method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
id() - Method in class org.apache.spark.util.AccumulatorV2
Returns the ID of this accumulator; it can only be called after registration.
Identifiable - Interface in org.apache.spark.ml.util
:: DeveloperApi ::
IDF - Class in org.apache.spark.ml.feature
:: Experimental :: Compute the Inverse Document Frequency (IDF) given a collection of documents.
IDF(String) - Constructor for class org.apache.spark.ml.feature.IDF
 
IDF() - Constructor for class org.apache.spark.ml.feature.IDF
 
idf() - Method in class org.apache.spark.ml.feature.IDFModel
Returns the IDF vector.
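IDF is an Estimator that is fit on term-frequency vectors (for example, those produced by HashingTF) and whose model rescales them; a minimal sketch, assuming a DataFrame named tfData with a vector column "tf":

    import org.apache.spark.ml.feature.IDF

    val idf = new IDF().setInputCol("tf").setOutputCol("tfidf")
    val idfModel = idf.fit(tfData)            // learns the IDF vector from the corpus
    val rescaled = idfModel.transform(tfData) // adds the "tfidf" column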
IDF - Class in org.apache.spark.mllib.feature
Inverse document frequency (IDF).
IDF(int) - Constructor for class org.apache.spark.mllib.feature.IDF
 
IDF() - Constructor for class org.apache.spark.mllib.feature.IDF
 
idf() - Method in class org.apache.spark.mllib.feature.IDF.DocumentFrequencyAggregator
Returns the current IDF vector.
idf() - Method in class org.apache.spark.mllib.feature.IDFModel
 
IDF.DocumentFrequencyAggregator - Class in org.apache.spark.mllib.feature
Document frequency aggregator.
IDF.DocumentFrequencyAggregator(int) - Constructor for class org.apache.spark.mllib.feature.IDF.DocumentFrequencyAggregator
 
IDF.DocumentFrequencyAggregator() - Constructor for class org.apache.spark.mllib.feature.IDF.DocumentFrequencyAggregator
 
IDFModel - Class in org.apache.spark.ml.feature
:: Experimental :: Model fitted by IDF.
IDFModel - Class in org.apache.spark.mllib.feature
Represents an IDF model that can transform term frequency vectors.
ifNotExists() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
implicitPrefs() - Static method in class org.apache.spark.ml.recommendation.ALS
 
implicits() - Method in class org.apache.spark.sql.SparkSession
Accessor for nested Scala object
implicits() - Method in class org.apache.spark.sql.SQLContext
Accessor for nested Scala object
improveException(Object, NotSerializableException) - Static method in class org.apache.spark.serializer.SerializationDebugger
Improve the given NotSerializableException with the serialization path leading from the given object to the problematic object.
Impurities - Class in org.apache.spark.mllib.tree.impurity
Factory for Impurity instances.
Impurities() - Constructor for class org.apache.spark.mllib.tree.impurity.Impurities
 
impurity() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
impurity() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
impurity() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
impurity() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
impurity() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
impurity() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
impurity() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
impurity() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
impurity() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
impurity() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
impurity() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
impurity() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
impurity() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData
 
impurity() - Method in class org.apache.spark.ml.tree.InternalNode
 
impurity() - Method in class org.apache.spark.ml.tree.LeafNode
 
impurity() - Method in class org.apache.spark.ml.tree.Node
Impurity measure at this node (for training data)
impurity() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
Impurity - Interface in org.apache.spark.mllib.tree.impurity
:: Experimental :: Trait for calculating information gain.
impurity() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
impurity() - Method in class org.apache.spark.mllib.tree.model.InformationGainStats
 
impurity() - Method in class org.apache.spark.mllib.tree.model.Node
 
impurityStats() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData
 
In() - Static method in class org.apache.spark.graphx.EdgeDirection
Edges arriving at a vertex.
In - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to one of the values in the array.
In(String, Object[]) - Constructor for class org.apache.spark.sql.sources.In
 
INACTIVE() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
inArray(Object) - Static method in class org.apache.spark.ml.param.ParamValidators
Check for value in an allowed set of values.
inArray(List<T>) - Static method in class org.apache.spark.ml.param.ParamValidators
Check for value in an allowed set of values.
IncompatibleMergeException - Exception in org.apache.spark.util.sketch
 
IncompatibleMergeException(String) - Constructor for exception org.apache.spark.util.sketch.IncompatibleMergeException
 
inDegrees() - Method in class org.apache.spark.graphx.GraphOps
The in-degree of each vertex in the graph.
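A minimal sketch of inDegrees on a small graph (assuming an existing SparkContext named sc; the vertices and edges are illustrative):

    import org.apache.spark.graphx.{Edge, Graph}

    val vertices = sc.parallelize(Seq((1L, "a"), (2L, "b"), (3L, "c")))
    val edges    = sc.parallelize(Seq(Edge(1L, 2L, 1), Edge(3L, 2L, 1)))
    val graph    = Graph(vertices, edges)

    // VertexRDD of (vertexId, inDegree); vertex 2L has in-degree 2 here.
    val inDeg = graph.inDegrees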
independence() - Method in class org.apache.spark.mllib.stat.test.ChiSqTest.NullHypothesis$
 
index() - Method in class org.apache.spark.ml.attribute.Attribute
Index of the attribute.
INDEX() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
index() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
index() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
index() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
index() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
index(int, int) - Method in interface org.apache.spark.ml.linalg.Matrix
Return the index for the (i, j)-th element in the backing array.
index() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRow
 
index(int, int) - Method in interface org.apache.spark.mllib.linalg.Matrix
Return the index for the (i, j)-th element in the backing array.
index() - Method in interface org.apache.spark.Partition
Get the partition's index within its parent RDD
index() - Method in class org.apache.spark.scheduler.TaskInfo
 
index() - Method in class org.apache.spark.status.api.v1.TaskData
 
IndexedRow - Class in org.apache.spark.mllib.linalg.distributed
Represents a row of IndexedRowMatrix.
IndexedRow(long, Vector) - Constructor for class org.apache.spark.mllib.linalg.distributed.IndexedRow
 
IndexedRowMatrix - Class in org.apache.spark.mllib.linalg.distributed
Represents a row-oriented DistributedMatrix with indexed rows.
IndexedRowMatrix(RDD<IndexedRow>, long, int) - Constructor for class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
 
IndexedRowMatrix(RDD<IndexedRow>) - Constructor for class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
Alternative constructor leaving matrix dimensions to be determined automatically.
indexOf(String) - Method in class org.apache.spark.ml.attribute.AttributeGroup
Index of an attribute specified by name.
indexOf(String) - Method in class org.apache.spark.ml.attribute.NominalAttribute
Index of a specific value.
indexOf(Object) - Method in class org.apache.spark.mllib.feature.HashingTF
Returns the index of the input term.
indexOf(B) - Static method in class org.apache.spark.sql.types.StructType
 
indexOf(B, int) - Static method in class org.apache.spark.sql.types.StructType
 
indexOfSlice(GenSeq<B>) - Static method in class org.apache.spark.sql.types.StructType
 
indexOfSlice(GenSeq<B>, int) - Static method in class org.apache.spark.sql.types.StructType
 
indexToLevel(int) - Static method in class org.apache.spark.mllib.tree.model.Node
Returns the level of the tree that the given node is in.
IndexToString - Class in org.apache.spark.ml.feature
:: Experimental :: A Transformer that maps a column of indices back to a new column of corresponding string values.
IndexToString() - Constructor for class org.apache.spark.ml.feature.IndexToString
 
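IndexToString is usually paired with a StringIndexerModel, reusing its labels to map predicted indices back to the original strings; a minimal sketch, assuming a DataFrame df with a string column "category":

    import org.apache.spark.ml.feature.{IndexToString, StringIndexer}

    val indexerModel = new StringIndexer()
      .setInputCol("category").setOutputCol("categoryIndex")
      .fit(df)
    val indexed = indexerModel.transform(df)

    val converter = new IndexToString()
      .setInputCol("categoryIndex").setOutputCol("originalCategory")
      .setLabels(indexerModel.labels)   // reuse the labels learned by the indexer
    val restored = converter.transform(indexed)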
indexWhere(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
indexWhere(Function1<A, Object>, int) - Static method in class org.apache.spark.sql.types.StructType
 
indices() - Method in class org.apache.spark.ml.feature.VectorSlicer
An array of indices to select features from a vector column.
indices() - Method in class org.apache.spark.ml.linalg.SparseVector
 
indices() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
indices() - Static method in class org.apache.spark.sql.types.StructType
 
inferSchema(SparkSession, Map<String, String>, Seq<FileStatus>) - Method in class org.apache.spark.ml.source.libsvm.DefaultSource
 
infoChanged(SparkAppHandle) - Method in interface org.apache.spark.launcher.SparkAppHandle.Listener
Callback for changes in any information that is not the handle's state.
infoGain() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
InformationGainStats - Class in org.apache.spark.mllib.tree.model
:: DeveloperApi :: Information gain statistics for each split. Params: gain (information gain value), impurity (current node impurity), leftImpurity (left node impurity), rightImpurity (right node impurity), leftPredict (left node predict), rightPredict (right node predict).
InformationGainStats(double, double, double, double, Predict, Predict) - Constructor for class org.apache.spark.mllib.tree.model.InformationGainStats
 
init() - Static method in class org.apache.spark.sql.types.StructType
 
initcap(Column) - Static method in class org.apache.spark.sql.functions
Returns a new string column by converting the first letter of each word to uppercase.
initCause(Throwable) - Static method in exception org.apache.spark.sql.AnalysisException
 
initCause(Throwable) - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
initDaemon(Logger) - Static method in class org.apache.spark.util.Utils
Utility function that should be called early in main() for daemons to set up some common diagnostic state.
initialHash() - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
initialize(InputSplit, TaskAttemptContext) - Method in class org.apache.hadoop.hive.ql.io.orc.SparkOrcNewRecordReader
 
initialize(double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Binomial$
 
initialize(double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gamma$
 
initialize(double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gaussian$
 
initialize(double, double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Poisson$
 
initialize(RDD<Tuple2<Object, Vector>>, LDA) - Method in interface org.apache.spark.mllib.clustering.LDAOptimizer
Initializer for the optimizer.
initialize(MutableAggregationBuffer) - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
Initializes the given aggregation buffer, i.e. the zero value of the aggregation buffer.
Initialized() - Static method in class org.apache.spark.rdd.CheckpointState
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.api.r.RRDD
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.graphx.EdgeRDD
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.graphx.GraphLoader
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.graphx.lib.PageRank
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.graphx.Pregel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.graphx.VertexRDD
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.clustering.KMeans
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.clustering.LDA
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.Binarizer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.DCT
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.HashingTF
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.IDF
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.IDFModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.IndexToString
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.Interaction
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.NGram
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.Normalizer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.PCA
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.PCAModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.RFormula
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.Pipeline
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.PipelineModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.recommendation.ALS
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.clustering.LDA
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.tree.model.Node
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.mllib.util.DataValidators
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.rdd.HadoopRDD
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.rdd.JdbcRDD
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.rdd.RDD
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.serializer.KryoSerializer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.SparkConf
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.SparkContext
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.SparkEnv
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.sql.Column
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.sql.SparkSession
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.sql.SQLContext
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.storage.StorageUtils
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.streaming.CheckpointReader
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.streaming.dstream.DStream
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.streaming.StreamingContext
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ui.JettyUtils
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.ui.UIUtils
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.util.ClosureCleaner
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.util.ShutdownHookManager
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.util.SignalUtils
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.util.SizeEstimator
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
initializeLogIfNecessary(boolean) - Static method in class org.apache.spark.util.Utils
 
initialState(RDD<Tuple2<KeyType, StateType>>) - Method in class org.apache.spark.streaming.StateSpec
Set the RDD containing the initial states that will be used by `mapWithState`
initialState(JavaPairRDD<KeyType, StateType>) - Method in class org.apache.spark.streaming.StateSpec
Set the RDD containing the initial states that will be used by `mapWithState`
initialValue() - Method in class org.apache.spark.partial.PartialResult
 
initMode() - Static method in class org.apache.spark.ml.clustering.KMeans
 
initMode() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
inits() - Static method in class org.apache.spark.sql.types.StructType
 
initSteps() - Static method in class org.apache.spark.ml.clustering.KMeans
 
initSteps() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
innerChildren() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
InnerClosureFinder - Class in org.apache.spark.util
 
InnerClosureFinder(Set<Class<?>>) - Constructor for class org.apache.spark.util.InnerClosureFinder
 
innerJoin(EdgeRDD<ED2>, Function4<Object, Object, ED, ED2, ED3>, ClassTag<ED2>, ClassTag<ED3>) - Method in class org.apache.spark.graphx.EdgeRDD
Inner joins this EdgeRDD with another EdgeRDD, assuming both are partitioned using the same PartitionStrategy.
innerJoin(EdgeRDD<ED2>, Function4<Object, Object, ED, ED2, ED3>, ClassTag<ED2>, ClassTag<ED3>) - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
innerJoin(RDD<Tuple2<Object, U>>, Function3<Object, VD, U, VD2>, ClassTag<U>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
innerJoin(RDD<Tuple2<Object, U>>, Function3<Object, VD, U, VD2>, ClassTag<U>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.VertexRDD
Inner joins this VertexRDD with an RDD containing vertex attribute pairs.
innerZipJoin(VertexRDD<U>, Function3<Object, VD, U, VD2>, ClassTag<U>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
innerZipJoin(VertexRDD<U>, Function3<Object, VD, U, VD2>, ClassTag<U>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.VertexRDD
Efficiently inner joins this VertexRDD with another VertexRDD sharing the same index.
INPUT() - Static method in class org.apache.spark.ui.ToolTips
 
input_file_name() - Static method in class org.apache.spark.sql.functions
Creates a string column for the file name of the current Spark task.
INPUT_METRICS_PREFIX() - Static method in class org.apache.spark.InternalAccumulator
 
inputBytes() - Method in class org.apache.spark.status.api.v1.ExecutorStageSummary
 
inputBytes() - Method in class org.apache.spark.status.api.v1.StageData
 
inputBytes() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
inputBytes() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
inputCol() - Static method in class org.apache.spark.ml.feature.Binarizer
 
inputCol() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
inputCol() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
inputCol() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
inputCol() - Static method in class org.apache.spark.ml.feature.DCT
 
inputCol() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
inputCol() - Static method in class org.apache.spark.ml.feature.HashingTF
 
inputCol() - Static method in class org.apache.spark.ml.feature.IDF
 
inputCol() - Static method in class org.apache.spark.ml.feature.IDFModel
 
inputCol() - Static method in class org.apache.spark.ml.feature.IndexToString
 
inputCol() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
inputCol() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
inputCol() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
inputCol() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
inputCol() - Static method in class org.apache.spark.ml.feature.NGram
 
inputCol() - Static method in class org.apache.spark.ml.feature.Normalizer
 
inputCol() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
inputCol() - Static method in class org.apache.spark.ml.feature.PCA
 
inputCol() - Static method in class org.apache.spark.ml.feature.PCAModel
 
inputCol() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
inputCol() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
inputCol() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
inputCol() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
inputCol() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
inputCol() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
inputCol() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
inputCol() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
inputCol() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
inputCol() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
inputCol() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
inputCol() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
inputCol() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
inputCol() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
inputCols() - Static method in class org.apache.spark.ml.feature.Interaction
 
inputCols() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
inputDStream() - Method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
inputDStream() - Method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
inputDStream() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
inputDStream() - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
InputDStream<T> - Class in org.apache.spark.streaming.dstream
This is the abstract base class for all input streams.
InputDStream(StreamingContext, ClassTag<T>) - Constructor for class org.apache.spark.streaming.dstream.InputDStream
 
InputFileNameHolder - Class in org.apache.spark.rdd
This holds file names of the current Spark task.
InputFileNameHolder() - Constructor for class org.apache.spark.rdd.InputFileNameHolder
 
inputFiles() - Method in class org.apache.spark.sql.Dataset
Returns a best-effort snapshot of the files that compose this Dataset.
inputFormat() - Method in class org.apache.spark.sql.internal.HiveSerDe
 
inputFormatCacheKey() - Method in class org.apache.spark.rdd.HadoopRDD
 
inputFormatClazz() - Method in class org.apache.spark.scheduler.InputFormatInfo
 
inputFormatClazz() - Method in class org.apache.spark.scheduler.SplitInfo
 
InputFormatInfo - Class in org.apache.spark.scheduler
:: DeveloperApi :: Parses and holds information about inputFormat (and files) specified as a parameter.
InputFormatInfo(Configuration, Class<?>, String) - Constructor for class org.apache.spark.scheduler.InputFormatInfo
 
InputMetricDistributions - Class in org.apache.spark.status.api.v1
 
InputMetrics - Class in org.apache.spark.status.api.v1
 
inputMetrics() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
inputMetrics() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
inputRecords() - Method in class org.apache.spark.status.api.v1.StageData
 
inputRecords() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
inputRecords() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
inputSchema() - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
A StructType represents data types of input arguments of this aggregate function.
inputSet() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
inputStreamId() - Method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
inputTypes() - Method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
inRange(double, double, boolean, boolean) - Static method in class org.apache.spark.ml.param.ParamValidators
Check for value in range lowerBound to upperBound.
inRange(double, double) - Static method in class org.apache.spark.ml.param.ParamValidators
Version of inRange() which is inclusive by default: [lowerBound, upperBound]
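 
A minimal sketch of how these validators are typically used (the parent id "demo" and the param name "threshold" are made up for illustration): inRange builds a predicate that can back a DoubleParam.
 
  import org.apache.spark.ml.param.{DoubleParam, ParamValidators}
 
  // Inclusive-by-default range check: accepts values in [0.0, 1.0].
  val inUnitInterval: Double => Boolean = ParamValidators.inRange[Double](0.0, 1.0)
  println(inUnitInterval(0.5))   // true
  println(inUnitInterval(1.5))   // false
 
  // The same predicate can back a param; "demo" and "threshold" are made-up names.
  val threshold = new DoubleParam("demo", "threshold", "value in [0, 1]", inUnitInterval)
  println(threshold.isValid(0.25))   // true
 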
insert(Dataset<Row>, boolean) - Method in interface org.apache.spark.sql.sources.InsertableRelation
 
InsertableRelation - Interface in org.apache.spark.sql.sources
:: DeveloperApi :: A BaseRelation into which data can be inserted through the insert method.
insertInto(String) - Method in class org.apache.spark.sql.DataFrameWriter
Inserts the content of the DataFrame to the specified table.
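 
A brief usage sketch (the local SparkSession and the table name demo_table are illustrative assumptions): insertInto appends rows into an existing table, matching columns by position rather than by name.
 
  import org.apache.spark.sql.SparkSession
 
  val spark = SparkSession.builder().appName("insertInto-demo").master("local[*]").getOrCreate()
  import spark.implicits._
 
  // Create a target table once, then append more rows into it by position.
  Seq((1, "a"), (2, "b")).toDF("id", "value").write.saveAsTable("demo_table")
  Seq((3, "c")).toDF("id", "value").write.insertInto("demo_table")
  spark.table("demo_table").show()
 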
InsertIntoHiveTable - Class in org.apache.spark.sql.hive.execution
 
InsertIntoHiveTable(org.apache.spark.sql.hive.MetastoreRelation, Map<String, Option<String>>, SparkPlan, boolean, boolean) - Constructor for class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
inShutdown() - Static method in class org.apache.spark.util.ShutdownHookManager
Detect whether this thread might be executing a shutdown hook.
inspectorToDataType(ObjectInspector) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
instance() - Static method in class org.apache.spark.mllib.tree.impurity.Entropy
Get this impurity instance.
instance() - Static method in class org.apache.spark.mllib.tree.impurity.Gini
Get this impurity instance.
instance() - Static method in class org.apache.spark.mllib.tree.impurity.Variance
Get this impurity instance.
INSTANCE - Static variable in class org.apache.spark.serializer.DummySerializerInstance
 
instr(Column, String) - Static method in class org.apache.spark.sql.functions
Locate the position of the first occurrence of substr column in the given string.
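 
A small illustration (column name s is assumed, with an active SparkSession named spark and spark.implicits._ in scope): instr is 1-based and returns 0 when the substring is absent.
 
  import org.apache.spark.sql.functions.{col, instr}
 
  val words = Seq("spark sql", "dataframe").toDF("s")
  // "spark sql" -> 7 (1-based position of "sql"); "dataframe" -> 0 (not found)
  words.select(col("s"), instr(col("s"), "sql").as("pos")).show()
 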
INT() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable int type.
intAccumulator(int) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator integer variable, which tasks can "add" values to using the add method.
intAccumulator(int, String) - Method in class org.apache.spark.api.java.JavaSparkContext
Create an Accumulator integer variable, which tasks can "add" values to using the add method.
IntArrayParam - Class in org.apache.spark.ml.param
:: DeveloperApi :: Specialized version of Param[Array[Int]] for Java.
IntArrayParam(Params, String, String, Function1<int[], Object>) - Constructor for class org.apache.spark.ml.param.IntArrayParam
 
IntArrayParam(Params, String, String) - Constructor for class org.apache.spark.ml.param.IntArrayParam
 
IntegerType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the IntegerType object.
IntegerType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing Int values.
INTER_JOB_WAIT_MS() - Static method in class org.apache.spark.ui.UIWorkloadGenerator
 
Interaction - Class in org.apache.spark.ml.feature
:: Experimental :: Implements the feature interaction transform.
Interaction(String) - Constructor for class org.apache.spark.ml.feature.Interaction
 
Interaction() - Constructor for class org.apache.spark.ml.feature.Interaction
 
intercept() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
intercept() - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
intercept() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
intercept() - Method in class org.apache.spark.ml.regression.LinearRegressionModel
 
intercept() - Method in class org.apache.spark.mllib.classification.impl.GLMClassificationModel.SaveLoadV1_0$.Data
 
intercept() - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
intercept() - Method in class org.apache.spark.mllib.classification.SVMModel
 
intercept() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearModel
 
intercept() - Method in class org.apache.spark.mllib.regression.impl.GLMRegressionModel.SaveLoadV1_0$.Data
 
intercept() - Method in class org.apache.spark.mllib.regression.LassoModel
 
intercept() - Method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
intercept() - Method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
intermediateStorageLevel() - Static method in class org.apache.spark.ml.recommendation.ALS
 
InternalAccumulator - Class in org.apache.spark
A collection of fields and methods concerned with internal accumulators that represent task level metrics.
InternalAccumulator() - Constructor for class org.apache.spark.InternalAccumulator
 
InternalAccumulator.input$ - Class in org.apache.spark
 
InternalAccumulator.input$() - Constructor for class org.apache.spark.InternalAccumulator.input$
 
InternalAccumulator.output$ - Class in org.apache.spark
 
InternalAccumulator.output$() - Constructor for class org.apache.spark.InternalAccumulator.output$
 
InternalAccumulator.shuffleRead$ - Class in org.apache.spark
 
InternalAccumulator.shuffleRead$() - Constructor for class org.apache.spark.InternalAccumulator.shuffleRead$
 
InternalAccumulator.shuffleWrite$ - Class in org.apache.spark
 
InternalAccumulator.shuffleWrite$() - Constructor for class org.apache.spark.InternalAccumulator.shuffleWrite$
 
internalCreateDataFrame(RDD<InternalRow>, StructType) - Method in class org.apache.spark.sql.SparkSession
Creates a DataFrame from an RDD[InternalRow].
InternalNode - Class in org.apache.spark.ml.tree
:: DeveloperApi :: Internal Decision Tree node.
InterruptibleIterator<T> - Class in org.apache.spark
:: DeveloperApi :: An iterator that wraps around an existing iterator to provide task killing functionality.
InterruptibleIterator(TaskContext, Iterator<T>) - Constructor for class org.apache.spark.InterruptibleIterator
 
interruptThread() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillTask
 
interruptThread() - Method in class org.apache.spark.scheduler.local.KillTask
 
intersect(Dataset<T>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset containing rows only in both this Dataset and another Dataset.
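 
A quick sketch (assuming an active SparkSession named spark with spark.implicits._ in scope): intersect keeps only the rows that appear in both Datasets.
 
  val left  = Seq(1, 2, 3, 4).toDS()
  val right = Seq(3, 4, 5).toDS()
  left.intersect(right).show()   // rows 3 and 4, i.e. values present in both Datasets
 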
intersect(GenSeq<B>) - Static method in class org.apache.spark.sql.types.StructType
 
intersection(JavaDoubleRDD) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return the intersection of this RDD and another one.
intersection(JavaPairRDD<K, V>) - Method in class org.apache.spark.api.java.JavaPairRDD
Return the intersection of this RDD and another one.
intersection(JavaRDD<T>) - Method in class org.apache.spark.api.java.JavaRDD
Return the intersection of this RDD and another one.
intersection(RDD<T>) - Static method in class org.apache.spark.api.r.RRDD
 
intersection(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
intersection(RDD<T>, int) - Static method in class org.apache.spark.api.r.RRDD
 
intersection(RDD<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
intersection(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
intersection(RDD<T>, int) - Static method in class org.apache.spark.graphx.EdgeRDD
 
intersection(RDD<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
intersection(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
intersection(RDD<T>, int) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
intersection(RDD<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
intersection(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
intersection(RDD<T>, int) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
intersection(RDD<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
intersection(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
intersection(RDD<T>, int) - Static method in class org.apache.spark.graphx.VertexRDD
 
intersection(RDD<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
intersection(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
intersection(RDD<T>, int) - Static method in class org.apache.spark.rdd.HadoopRDD
 
intersection(RDD<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
intersection(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
intersection(RDD<T>, int) - Static method in class org.apache.spark.rdd.JdbcRDD
 
intersection(RDD<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
intersection(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
intersection(RDD<T>, int) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
intersection(RDD<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
intersection(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
intersection(RDD<T>, int) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
intersection(RDD<T>) - Method in class org.apache.spark.rdd.RDD
Return the intersection of this RDD and another one.
intersection(RDD<T>, Partitioner, Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Return the intersection of this RDD and another one.
intersection(RDD<T>, int) - Method in class org.apache.spark.rdd.RDD
Return the intersection of this RDD and another one.
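 
A sketch of the three overloads on an assumed SparkContext sc: the result drops duplicates and triggers a shuffle, and the extra arguments only control the partitioning of that shuffle.
 
  val a = sc.parallelize(Seq(1, 2, 2, 3, 4, 5))
  val b = sc.parallelize(Seq(4, 5, 6))
 
  a.intersection(b).collect()        // Array(4, 5), order not guaranteed, duplicates removed
  a.intersection(b, 2).collect()     // same result, shuffled into 2 partitions
  a.intersection(b, new org.apache.spark.HashPartitioner(2)).collect()   // explicit Partitioner
 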
intersection$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.api.r.RRDD
 
intersection$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.graphx.EdgeRDD
 
intersection$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
intersection$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
intersection$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.graphx.VertexRDD
 
intersection$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.rdd.HadoopRDD
 
intersection$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.rdd.JdbcRDD
 
intersection$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
intersection$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
intervalMs() - Method in class org.apache.spark.sql.ProcessingTime
 
IntParam - Class in org.apache.spark.ml.param
:: DeveloperApi :: Specialized version of Param[Int] for Java.
IntParam(String, String, String, Function1<Object, Object>) - Constructor for class org.apache.spark.ml.param.IntParam
 
IntParam(String, String, String) - Constructor for class org.apache.spark.ml.param.IntParam
 
IntParam(Identifiable, String, String, Function1<Object, Object>) - Constructor for class org.apache.spark.ml.param.IntParam
 
IntParam(Identifiable, String, String) - Constructor for class org.apache.spark.ml.param.IntParam
 
IntParam - Class in org.apache.spark.util
An extractor object for parsing strings into integers.
IntParam() - Constructor for class org.apache.spark.util.IntParam
 
inverse() - Method in class org.apache.spark.ml.feature.DCT
Indicates whether to perform the inverse DCT (true) or forward DCT (false).
inverse(double[], int) - Static method in class org.apache.spark.mllib.linalg.CholeskyDecomposition
Computes the inverse of a real symmetric positive definite matrix A using the Cholesky factorization A = U**T*U.
invoke(Class<?>, Object, String, Seq<Tuple2<Class<?>, Object>>) - Static method in class org.apache.spark.util.Utils
 
invokedMethod(Object, Class<?>, String) - Static method in class org.apache.spark.graphx.util.BytecodeUtils
Test whether the given closure invokes the specified method in the specified class.
invokeWriteReplace(Object) - Method in class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods
 
is32BitDecimalType(DataType) - Static method in class org.apache.spark.sql.types.DecimalType
Returns if dt is a DecimalType that fits inside an int
is64BitDecimalType(DataType) - Static method in class org.apache.spark.sql.types.DecimalType
Returns if dt is a DecimalType that fits inside a long
isActive() - Method in interface org.apache.spark.sql.ContinuousQuery
Whether the query is currently active or not
isActive() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
isAddIntercept() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
isAddIntercept() - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
isAddIntercept() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
Get whether the algorithm uses addIntercept.
isAddIntercept() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
isAddIntercept() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
isAddIntercept() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
isAllowed(Enumeration.Value, Enumeration.Value) - Static method in class org.apache.spark.scheduler.TaskLocality
 
isBatchingEnabled(SparkConf, boolean) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
isBindCollision(Throwable) - Static method in class org.apache.spark.util.Utils
Return whether the exception is caused by an address-port collision when binding.
isBroadcast() - Method in class org.apache.spark.storage.BlockId
 
isBroadcast() - Static method in class org.apache.spark.storage.BroadcastBlockId
 
isBroadcast() - Static method in class org.apache.spark.storage.RDDBlockId
 
isBroadcast() - Static method in class org.apache.spark.storage.ShuffleBlockId
 
isBroadcast() - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
isBroadcast() - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
isBroadcast() - Static method in class org.apache.spark.storage.StreamBlockId
 
isBroadcast() - Static method in class org.apache.spark.storage.TaskResultBlockId
 
isBucket() - Method in class org.apache.spark.sql.catalog.Column
 
isByteArrayDecimalType(DataType) - Static method in class org.apache.spark.sql.types.DecimalType
Returns if dt is a DecimalType that doesn't fit inside a long
isCached(String) - Method in class org.apache.spark.sql.catalog.Catalog
Returns true if the table is currently cached in-memory.
isCached(String) - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns true if the table is currently cached in-memory.
isCached(Dataset<?>) - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns true if the Dataset is currently cached in-memory.
isCached(String) - Method in class org.apache.spark.sql.SQLContext
Returns true if the table is currently cached in-memory.
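 
A short sketch on the Catalog API (the view name demo_view and SparkSession spark, with spark.implicits._ in scope, are assumptions):
 
  Seq((1, "a")).toDF("id", "value").createOrReplaceTempView("demo_view")
  spark.catalog.cacheTable("demo_view")
  println(spark.catalog.isCached("demo_view"))   // true while the view is cached
  spark.catalog.uncacheTable("demo_view")
  println(spark.catalog.isCached("demo_view"))   // false again
 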
isCached() - Method in class org.apache.spark.storage.BlockStatus
 
isCached() - Method in class org.apache.spark.storage.RDDInfo
 
isCancelled() - Method in class org.apache.spark.ComplexFutureAction
 
isCancelled() - Method in interface org.apache.spark.FutureAction
Returns whether the action has been cancelled.
isCancelled() - Method in class org.apache.spark.SimpleFutureAction
 
isCheckpointed() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
isCheckpointed() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
isCheckpointed() - Static method in class org.apache.spark.api.java.JavaRDD
 
isCheckpointed() - Method in interface org.apache.spark.api.java.JavaRDDLike
Return whether this RDD has been checkpointed or not
isCheckpointed() - Static method in class org.apache.spark.api.r.RRDD
 
isCheckpointed() - Static method in class org.apache.spark.graphx.EdgeRDD
 
isCheckpointed() - Method in class org.apache.spark.graphx.Graph
Return whether this Graph has been checkpointed or not.
isCheckpointed() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
isCheckpointed() - Method in class org.apache.spark.graphx.impl.GraphImpl
 
isCheckpointed() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
isCheckpointed() - Static method in class org.apache.spark.graphx.VertexRDD
 
isCheckpointed() - Static method in class org.apache.spark.rdd.HadoopRDD
 
isCheckpointed() - Static method in class org.apache.spark.rdd.JdbcRDD
 
isCheckpointed() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
isCheckpointed() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
isCheckpointed() - Method in class org.apache.spark.rdd.RDD
Return whether this RDD is checkpointed and materialized, either reliably or locally.
isCompatible(BloomFilter) - Method in class org.apache.spark.util.sketch.BloomFilter
Determines whether a given bloom filter is compatible with this bloom filter.
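 
A minimal, illustrative sketch: two filters created with the same expected item count have the same bit size and number of hash functions, so they are compatible and can be merged.
 
  import org.apache.spark.util.sketch.BloomFilter
 
  val f1 = BloomFilter.create(1000)
  val f2 = BloomFilter.create(1000)
  f1.put("a"); f2.put("b")
  println(f1.isCompatible(f2))    // true: same bit size and hash count
  f1.mergeInPlace(f2)
  println(f1.mightContain("b"))   // true after the merge
 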
isCompleted() - Method in class org.apache.spark.ComplexFutureAction
 
isCompleted() - Method in interface org.apache.spark.FutureAction
Returns whether the action has already been completed with a value or an exception.
isCompleted() - Method in class org.apache.spark.SimpleFutureAction
 
isCompleted() - Method in class org.apache.spark.TaskContext
Returns true if the task has completed.
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.clustering.LDA
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.DCT
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.IDF
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.Interaction
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.NGram
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.PCA
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.RFormula
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
isDefined(Param<?>) - Method in interface org.apache.spark.ml.param.Params
Checks whether a param is explicitly set or has a default value.
isDefined(Param<?>) - Static method in class org.apache.spark.ml.Pipeline
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.PipelineModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
isDefined(Param<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
isDefinedAt(A) - Static method in class org.apache.spark.sql.types.StructType
 
isDistributed() - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
isDistributed() - Method in class org.apache.spark.ml.clustering.LDAModel
Indicates whether this instance is of type DistributedLDAModel
isDistributed() - Method in class org.apache.spark.ml.clustering.LocalLDAModel
 
isDriver() - Method in class org.apache.spark.storage.BlockManagerId
 
isDynamicAllocationEnabled(SparkConf) - Static method in class org.apache.spark.util.Utils
Return whether dynamic allocation is enabled in the given conf. Dynamic allocation and explicitly setting the number of executors are inherently incompatible.
isEmpty() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
isEmpty() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
isEmpty() - Static method in class org.apache.spark.api.java.JavaRDD
 
isEmpty() - Method in interface org.apache.spark.api.java.JavaRDDLike
 
isEmpty() - Static method in class org.apache.spark.api.r.RRDD
 
isEmpty() - Static method in class org.apache.spark.graphx.EdgeRDD
 
isEmpty() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
isEmpty() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
isEmpty() - Static method in class org.apache.spark.graphx.VertexRDD
 
isEmpty() - Static method in class org.apache.spark.rdd.HadoopRDD
 
isEmpty() - Static method in class org.apache.spark.rdd.JdbcRDD
 
isEmpty() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
isEmpty() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
isEmpty() - Method in class org.apache.spark.rdd.RDD
 
isEmpty() - Static method in class org.apache.spark.sql.types.StructType
 
isExecutorStartupConf(String) - Static method in class org.apache.spark.SparkConf
Return whether the given config should be passed to an executor on start-up.
isExperiment() - Method in class org.apache.spark.mllib.stat.test.BinarySample
 
isFailed(Enumeration.Value) - Static method in class org.apache.spark.TaskState
 
isFatalError(Throwable) - Static method in class org.apache.spark.util.Utils
Returns true if the given exception was fatal.
isFinal() - Method in enum org.apache.spark.launcher.SparkAppHandle.State
Whether this state is a final state, meaning the application is not running anymore once it's reached.
isFinished(Enumeration.Value) - Static method in class org.apache.spark.TaskState
 
isImmutable() - Static method in class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
isin(Object...) - Method in class org.apache.spark.sql.Column
A boolean expression that is evaluated to true if the value of this expression is contained by the evaluated values of the arguments.
isin(Seq<Object>) - Method in class org.apache.spark.sql.Column
A boolean expression that is evaluated to true if the value of this expression is contained by the evaluated values of the arguments.
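 
A brief usage sketch (the column name color is assumed, with an active SparkSession named spark and spark.implicits._ in scope):
 
  val colors = Seq("red", "green", "blue").toDF("color")
  colors.filter($"color".isin("red", "blue")).show()   // keeps the "red" and "blue" rows
 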
isInDirectory(File, File) - Static method in class org.apache.spark.util.Utils
Return whether the specified file is a parent directory of the child file.
isInitialValueFinal() - Method in class org.apache.spark.partial.PartialResult
 
isInterrupted() - Method in class org.apache.spark.TaskContext
Returns true if the task has been killed.
isLargerBetter() - Method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
isLargerBetter() - Method in class org.apache.spark.ml.evaluation.Evaluator
Indicates whether the metric returned by evaluate() should be maximized (true, default) or minimized (false).
isLargerBetter() - Method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
isLargerBetter() - Method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
isLeaf() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
isLeaf() - Method in class org.apache.spark.mllib.tree.model.Node
 
isLeftChild(int) - Static method in class org.apache.spark.mllib.tree.model.Node
Returns true if this is a left child.
isLocal() - Method in class org.apache.spark.api.java.JavaSparkContext
 
isLocal() - Method in class org.apache.spark.SparkContext
 
isLocal() - Method in class org.apache.spark.sql.Dataset
Returns true if the collect and take methods can be run locally (without any Spark executors).
isLocalMaster(SparkConf) - Static method in class org.apache.spark.util.Utils
 
isMac() - Static method in class org.apache.spark.util.Utils
Whether the underlying operating system is Mac OS X.
isMulticlassClassification() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
isMulticlassWithCategoricalFeatures() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
isMultipleOf(Duration) - Method in class org.apache.spark.streaming.Duration
 
isMultipleOf(Duration) - Method in class org.apache.spark.streaming.Time
 
isNaN() - Method in class org.apache.spark.sql.Column
True if the current expression is NaN.
isnan(Column) - Static method in class org.apache.spark.sql.functions
Return true iff the column is NaN.
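 
A small sketch of both forms (column name x is assumed, with an active SparkSession named spark and spark.implicits._ in scope):
 
  import org.apache.spark.sql.functions.isnan
 
  val xs = Seq(1.0, Double.NaN, 3.0).toDF("x")
  xs.filter(isnan($"x")).show()    // only the NaN row
  xs.filter(!$"x".isNaN).show()    // the rows that are not NaN
 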
isNominal() - Method in class org.apache.spark.ml.attribute.Attribute
Tests whether this attribute is nominal, true for NominalAttribute and BinaryAttribute.
isNominal() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
isNominal() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
isNominal() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
isNominal() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
isNotNull() - Method in class org.apache.spark.sql.Column
True if the current expression is NOT null.
IsNotNull - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to a non-null value.
IsNotNull(String) - Constructor for class org.apache.spark.sql.sources.IsNotNull
 
isNull() - Method in class org.apache.spark.sql.Column
True if the current expression is null.
isnull(Column) - Static method in class org.apache.spark.sql.functions
Return true iff the column is null.
IsNull - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to null.
IsNull(String) - Constructor for class org.apache.spark.sql.sources.IsNull
 
isNullAt(int) - Method in interface org.apache.spark.sql.Row
Checks whether the value at position i is null.
isNumeric() - Method in class org.apache.spark.ml.attribute.Attribute
Tests whether this attribute is numeric, true for NumericAttribute and BinaryAttribute.
isNumeric() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
isNumeric() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
isNumeric() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
isNumeric() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
isOrdinal() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
isotonic() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
isotonic() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
isotonic() - Method in class org.apache.spark.mllib.regression.IsotonicRegressionModel
 
IsotonicRegression - Class in org.apache.spark.ml.regression
:: Experimental :: Isotonic regression.
IsotonicRegression(String) - Constructor for class org.apache.spark.ml.regression.IsotonicRegression
 
IsotonicRegression() - Constructor for class org.apache.spark.ml.regression.IsotonicRegression
 
IsotonicRegression - Class in org.apache.spark.mllib.regression
Isotonic regression.
IsotonicRegression() - Constructor for class org.apache.spark.mllib.regression.IsotonicRegression
Constructs IsotonicRegression instance with default parameter isotonic = true.
IsotonicRegressionModel - Class in org.apache.spark.ml.regression
:: Experimental :: Model fitted by IsotonicRegression.
IsotonicRegressionModel - Class in org.apache.spark.mllib.regression
Regression model for isotonic regression.
IsotonicRegressionModel(double[], double[], boolean) - Constructor for class org.apache.spark.mllib.regression.IsotonicRegressionModel
 
IsotonicRegressionModel(Iterable<Object>, Iterable<Object>, Boolean) - Constructor for class org.apache.spark.mllib.regression.IsotonicRegressionModel
A Java-friendly constructor that takes two Iterable parameters and one Boolean parameter.
isPartition() - Method in class org.apache.spark.sql.catalog.Column
 
isPresent() - Method in class org.apache.spark.api.java.Optional
 
isRDD() - Method in class org.apache.spark.storage.BlockId
 
isRDD() - Static method in class org.apache.spark.storage.BroadcastBlockId
 
isRDD() - Static method in class org.apache.spark.storage.RDDBlockId
 
isRDD() - Static method in class org.apache.spark.storage.ShuffleBlockId
 
isRDD() - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
isRDD() - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
isRDD() - Static method in class org.apache.spark.storage.StreamBlockId
 
isRDD() - Static method in class org.apache.spark.storage.TaskResultBlockId
 
isRegistered() - Method in class org.apache.spark.util.AccumulatorV2
Returns true if this accumulator has been registered.
isRInstalled() - Static method in class org.apache.spark.api.r.RUtils
Check if R is installed before running tests that use R commands.
isRootContext() - Method in class org.apache.spark.sql.SQLContext
 
isRunningLocally() - Method in class org.apache.spark.TaskContext
Deprecated.
Local execution was removed, so this always returns false. Since 2.0.0.
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.clustering.LDA
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.DCT
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.IDF
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.Interaction
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.NGram
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.PCA
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.RFormula
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
isSet(Param<?>) - Method in interface org.apache.spark.ml.param.Params
Checks whether a param is explicitly set.
isSet(Param<?>) - Static method in class org.apache.spark.ml.Pipeline
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.PipelineModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
isSet(Param<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
isShuffle() - Method in class org.apache.spark.storage.BlockId
 
isShuffle() - Static method in class org.apache.spark.storage.BroadcastBlockId
 
isShuffle() - Static method in class org.apache.spark.storage.RDDBlockId
 
isShuffle() - Static method in class org.apache.spark.storage.ShuffleBlockId
 
isShuffle() - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
isShuffle() - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
isShuffle() - Static method in class org.apache.spark.storage.StreamBlockId
 
isShuffle() - Static method in class org.apache.spark.storage.TaskResultBlockId
 
isSorted(int[]) - Method in class org.apache.spark.mllib.feature.ChiSqSelectorModel
 
isSparkPortConf(String) - Static method in class org.apache.spark.SparkConf
Return true if the given config matches either spark.*.port or spark.port.*.
isStarted() - Method in class org.apache.spark.streaming.receiver.Receiver
Check if the receiver has started or not.
isStopped() - Method in class org.apache.spark.SparkContext
 
isStopped() - Method in class org.apache.spark.streaming.receiver.Receiver
Check if receiver has been marked for stopping.
isStreaming() - Method in class org.apache.spark.sql.Dataset
Returns true if this Dataset contains one or more sources that continuously return data as it arrives.
isSymlink(File) - Static method in class org.apache.spark.util.Utils
Check to see if file is a symbolic link.
isTemporary() - Method in class org.apache.spark.sql.catalog.Function
 
isTemporary() - Method in class org.apache.spark.sql.catalog.Table
 
isTesting() - Static method in class org.apache.spark.util.Utils
Indicates whether Spark is currently running unit tests.
isTimingOut() - Method in class org.apache.spark.streaming.State
Whether the state is timing out and going to be removed by the system after the current batch.
isTraceEnabled() - Static method in class org.apache.spark.api.r.RRDD
 
isTraceEnabled() - Static method in class org.apache.spark.graphx.EdgeRDD
 
isTraceEnabled() - Static method in class org.apache.spark.graphx.GraphLoader
 
isTraceEnabled() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
isTraceEnabled() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
isTraceEnabled() - Static method in class org.apache.spark.graphx.lib.PageRank
 
isTraceEnabled() - Static method in class org.apache.spark.graphx.Pregel
 
isTraceEnabled() - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
isTraceEnabled() - Static method in class org.apache.spark.graphx.VertexRDD
 
isTraceEnabled() - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
isTraceEnabled() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
isTraceEnabled() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
isTraceEnabled() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.clustering.KMeans
 
isTraceEnabled() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.clustering.LDA
 
isTraceEnabled() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.Binarizer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.DCT
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.HashingTF
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.IDF
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.IDFModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.IndexToString
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.Interaction
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.NGram
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.Normalizer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.PCA
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.PCAModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.RFormula
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
isTraceEnabled() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.Pipeline
 
isTraceEnabled() - Static method in class org.apache.spark.ml.PipelineModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.recommendation.ALS
 
isTraceEnabled() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
isTraceEnabled() - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
isTraceEnabled() - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
isTraceEnabled() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
isTraceEnabled() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
isTraceEnabled() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
isTraceEnabled() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.clustering.KMeans
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.clustering.LDA
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.linalg.BLAS
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.recommendation.ALS
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.tree.model.Node
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.tree.RandomForest
 
isTraceEnabled() - Static method in class org.apache.spark.mllib.util.DataValidators
 
isTraceEnabled() - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
isTraceEnabled() - Static method in class org.apache.spark.rdd.HadoopRDD
 
isTraceEnabled() - Static method in class org.apache.spark.rdd.JdbcRDD
 
isTraceEnabled() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
isTraceEnabled() - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
isTraceEnabled() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
isTraceEnabled() - Static method in class org.apache.spark.rdd.RDD
 
isTraceEnabled() - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
isTraceEnabled() - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
isTraceEnabled() - Static method in class org.apache.spark.scheduler.StatsReportListener
 
isTraceEnabled() - Static method in class org.apache.spark.serializer.KryoSerializer
 
isTraceEnabled() - Static method in class org.apache.spark.serializer.SerializationDebugger
 
isTraceEnabled() - Static method in class org.apache.spark.SparkConf
 
isTraceEnabled() - Static method in class org.apache.spark.SparkContext
 
isTraceEnabled() - Static method in class org.apache.spark.SparkEnv
 
isTraceEnabled() - Static method in class org.apache.spark.sql.Column
 
isTraceEnabled() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
isTraceEnabled() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
isTraceEnabled() - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
isTraceEnabled() - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
isTraceEnabled() - Static method in class org.apache.spark.sql.SparkSession
 
isTraceEnabled() - Static method in class org.apache.spark.sql.SQLContext
 
isTraceEnabled() - Static method in class org.apache.spark.sql.types.UDTRegistration
 
isTraceEnabled() - Static method in class org.apache.spark.storage.StorageUtils
 
isTraceEnabled() - Static method in class org.apache.spark.streaming.CheckpointReader
 
isTraceEnabled() - Static method in class org.apache.spark.streaming.dstream.DStream
 
isTraceEnabled() - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
isTraceEnabled() - Static method in class org.apache.spark.streaming.StreamingContext
 
isTraceEnabled() - Static method in class org.apache.spark.streaming.util.RawTextSender
 
isTraceEnabled() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
isTraceEnabled() - Static method in class org.apache.spark.ui.JettyUtils
 
isTraceEnabled() - Static method in class org.apache.spark.ui.UIUtils
 
isTraceEnabled() - Static method in class org.apache.spark.util.ClosureCleaner
 
isTraceEnabled() - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
isTraceEnabled() - Static method in class org.apache.spark.util.ShutdownHookManager
 
isTraceEnabled() - Static method in class org.apache.spark.util.SignalUtils
 
isTraceEnabled() - Static method in class org.apache.spark.util.SizeEstimator
 
isTraceEnabled() - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
isTraceEnabled() - Static method in class org.apache.spark.util.Utils
 
isTransposed() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
isTransposed() - Method in interface org.apache.spark.ml.linalg.Matrix
Flag that keeps track of whether the matrix is transposed or not.
isTransposed() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
isTransposed() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
isTransposed() - Method in interface org.apache.spark.mllib.linalg.Matrix
Flag that keeps track of whether the matrix is transposed or not.
isTransposed() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
isTraversableAgain() - Static method in class org.apache.spark.sql.types.StructType
 
isValid() - Static method in class org.apache.spark.ml.param.DoubleParam
 
isValid() - Static method in class org.apache.spark.ml.param.FloatParam
 
isValid() - Method in class org.apache.spark.ml.param.Param
 
isValid() - Method in class org.apache.spark.storage.StorageLevel
 
isWindows() - Static method in class org.apache.spark.util.Utils
Whether the underlying operating system is Windows.
isZero() - Method in class org.apache.spark.sql.types.Decimal
 
isZero() - Method in class org.apache.spark.streaming.Duration
 
isZero() - Method in class org.apache.spark.util.AccumulatorV2
Returns if this accumulator is zero value or not.
isZero() - Method in class org.apache.spark.util.DoubleAccumulator
 
isZero() - Method in class org.apache.spark.util.LegacyAccumulatorWrapper
 
isZero() - Method in class org.apache.spark.util.ListAccumulator
 
isZero() - Method in class org.apache.spark.util.LongAccumulator
Returns if this accumulator is zero value or not.
item() - Method in class org.apache.spark.ml.recommendation.ALS.Rating
 
itemCol() - Static method in class org.apache.spark.ml.recommendation.ALS
 
itemCol() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
itemFactors() - Method in class org.apache.spark.ml.recommendation.ALSModel
 
items() - Method in class org.apache.spark.mllib.fpm.FPGrowth.FreqItemset
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.api.java.JavaRDD
 
iterator(Partition, TaskContext) - Method in interface org.apache.spark.api.java.JavaRDDLike
Internal method to this RDD; will read from cache if applicable, or otherwise compute it.
iterator(Partition, TaskContext) - Static method in class org.apache.spark.api.r.RRDD
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.graphx.EdgeRDD
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.graphx.VertexRDD
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.rdd.HadoopRDD
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.rdd.JdbcRDD
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
iterator(Partition, TaskContext) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
iterator(Partition, TaskContext) - Method in class org.apache.spark.rdd.RDD
Internal method to this RDD; will read from cache if applicable, or otherwise compute it.
iterator() - Method in class org.apache.spark.sql.types.StructType
 

J

j() - Method in class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
jarOfClass(Class<?>) - Static method in class org.apache.spark.api.java.JavaSparkContext
Find the JAR from which a given class was loaded, to make it easy for users to pass their JARs to SparkContext.
jarOfClass(Class<?>) - Static method in class org.apache.spark.SparkContext
Find the JAR from which a given class was loaded, to make it easy for users to pass their JARs to SparkContext.
jarOfClass(Class<?>) - Static method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Find the JAR from which a given class was loaded, to make it easy for users to pass their JARs to StreamingContext.
jarOfClass(Class<?>) - Static method in class org.apache.spark.streaming.StreamingContext
Find the JAR from which a given class was loaded, to make it easy for users to pass their JARs to StreamingContext.
jarOfObject(Object) - Static method in class org.apache.spark.api.java.JavaSparkContext
Find the JAR that contains the class of a particular object, to make it easy for users to pass their JARs to SparkContext.
jarOfObject(Object) - Static method in class org.apache.spark.SparkContext
Find the JAR that contains the class of a particular object, to make it easy for users to pass their JARs to SparkContext.
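For reference, a minimal sketch (not part of the generated index) of how jarOfClass can be used to ship the application's own JAR to executors; the class name JarExample is simply this example's entry point, and the app name is illustrative.

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;

    public class JarExample {
      public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("jar-of-class-example");
        // Returns the JAR(s) containing JarExample, if the class was loaded from a JAR.
        conf.setJars(JavaSparkContext.jarOfClass(JarExample.class));
        JavaSparkContext jsc = new JavaSparkContext(conf);
        jsc.stop();
      }
    }
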
jars() - Method in class org.apache.spark.api.java.JavaSparkContext
 
jars() - Method in class org.apache.spark.SparkContext
 
javaAntecedent() - Method in class org.apache.spark.mllib.fpm.AssociationRules.Rule
Returns antecedent in a Java List.
javaCategoryMaps() - Method in class org.apache.spark.ml.feature.VectorIndexerModel
Java-friendly version of categoryMaps
javaClassToDataType(Class<?>) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
javaConsequent() - Method in class org.apache.spark.mllib.fpm.AssociationRules.Rule
Returns consequent in a Java List.
JavaDoubleRDD - Class in org.apache.spark.api.java
 
JavaDoubleRDD(RDD<Object>) - Constructor for class org.apache.spark.api.java.JavaDoubleRDD
 
JavaDStream<T> - Class in org.apache.spark.streaming.api.java
A Java-friendly interface to DStream, the basic abstraction in Spark Streaming that represents a continuous stream of data.
JavaDStream(DStream<T>, ClassTag<T>) - Constructor for class org.apache.spark.streaming.api.java.JavaDStream
 
JavaDStreamLike<T,This extends JavaDStreamLike<T,This,R>,R extends JavaRDDLike<T,R>> - Interface in org.apache.spark.streaming.api.java
 
JavaFutureAction<T> - Interface in org.apache.spark.api.java
 
JavaHadoopRDD<K,V> - Class in org.apache.spark.api.java
 
JavaHadoopRDD(HadoopRDD<K, V>, ClassTag<K>, ClassTag<V>) - Constructor for class org.apache.spark.api.java.JavaHadoopRDD
 
JavaInputDStream<T> - Class in org.apache.spark.streaming.api.java
A Java-friendly interface to InputDStream.
JavaInputDStream(InputDStream<T>, ClassTag<T>) - Constructor for class org.apache.spark.streaming.api.java.JavaInputDStream
 
javaItems() - Method in class org.apache.spark.mllib.fpm.FPGrowth.FreqItemset
Returns items in a Java List.
JavaIterableWrapperSerializer - Class in org.apache.spark.serializer
A Kryo serializer for serializing results returned by asJavaIterable.
JavaIterableWrapperSerializer() - Constructor for class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
JavaMapWithStateDStream<KeyType,ValueType,StateType,MappedType> - Class in org.apache.spark.streaming.api.java
:: Experimental :: DStream representing the stream of data generated by mapWithState operation on a JavaPairDStream.
JavaNewHadoopRDD<K,V> - Class in org.apache.spark.api.java
 
JavaNewHadoopRDD(NewHadoopRDD<K, V>, ClassTag<K>, ClassTag<V>) - Constructor for class org.apache.spark.api.java.JavaNewHadoopRDD
 
JavaPairDStream<K,V> - Class in org.apache.spark.streaming.api.java
A Java-friendly interface to a DStream of key-value pairs, which provides extra methods like reduceByKey and join.
JavaPairDStream(DStream<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Constructor for class org.apache.spark.streaming.api.java.JavaPairDStream
 
JavaPairInputDStream<K,V> - Class in org.apache.spark.streaming.api.java
A Java-friendly interface to InputDStream of key-value pairs.
JavaPairInputDStream(InputDStream<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Constructor for class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
JavaPairRDD<K,V> - Class in org.apache.spark.api.java
 
JavaPairRDD(RDD<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Constructor for class org.apache.spark.api.java.JavaPairRDD
 
JavaPairReceiverInputDStream<K,V> - Class in org.apache.spark.streaming.api.java
A Java-friendly interface to ReceiverInputDStream, the abstract class for defining any input stream that receives data over the network.
JavaPairReceiverInputDStream(ReceiverInputDStream<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Constructor for class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
JavaParams - Class in org.apache.spark.ml.param
:: DeveloperApi :: Java-friendly wrapper for Params.
JavaParams() - Constructor for class org.apache.spark.ml.param.JavaParams
 
JavaRDD<T> - Class in org.apache.spark.api.java
 
JavaRDD(RDD<T>, ClassTag<T>) - Constructor for class org.apache.spark.api.java.JavaRDD
 
javaRDD() - Method in class org.apache.spark.sql.Dataset
Returns the content of the Dataset as a JavaRDD of Rows.
JavaRDDLike<T,This extends JavaRDDLike<T,This>> - Interface in org.apache.spark.api.java
Defines operations common to several Java RDD implementations.
JavaReceiverInputDStream<T> - Class in org.apache.spark.streaming.api.java
A Java-friendly interface to ReceiverInputDStream, the abstract class for defining any input stream that receives data over the network.
JavaReceiverInputDStream(ReceiverInputDStream<T>, ClassTag<T>) - Constructor for class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
javaSequence() - Method in class org.apache.spark.mllib.fpm.PrefixSpan.FreqSequence
Returns sequence as a Java List of lists for Java users.
javaSerialization(ClassTag<T>) - Static method in class org.apache.spark.sql.Encoders
(Scala-specific) Creates an encoder that serializes objects of type T using generic Java serialization.
javaSerialization(Class<T>) - Static method in class org.apache.spark.sql.Encoders
Creates an encoder that serializes objects of type T using generic Java serialization.
JavaSerializer - Class in org.apache.spark.serializer
:: DeveloperApi :: A Spark serializer that uses Java's built-in serialization.
JavaSerializer(SparkConf) - Constructor for class org.apache.spark.serializer.JavaSerializer
 
JavaSerializer() - Constructor for class org.apache.spark.serializer.JavaSerializer
 
JavaSparkContext - Class in org.apache.spark.api.java
A Java-friendly version of SparkContext that returns JavaRDDs and works with Java collections instead of Scala ones.
JavaSparkContext(SparkContext) - Constructor for class org.apache.spark.api.java.JavaSparkContext
 
JavaSparkContext() - Constructor for class org.apache.spark.api.java.JavaSparkContext
Create a JavaSparkContext that loads settings from system properties (for instance, when launching with ./bin/spark-submit).
JavaSparkContext(SparkConf) - Constructor for class org.apache.spark.api.java.JavaSparkContext
 
JavaSparkContext(String, String) - Constructor for class org.apache.spark.api.java.JavaSparkContext
 
JavaSparkContext(String, String, SparkConf) - Constructor for class org.apache.spark.api.java.JavaSparkContext
 
JavaSparkContext(String, String, String, String) - Constructor for class org.apache.spark.api.java.JavaSparkContext
 
JavaSparkContext(String, String, String, String[]) - Constructor for class org.apache.spark.api.java.JavaSparkContext
 
JavaSparkContext(String, String, String, String[], Map<String, String>) - Constructor for class org.apache.spark.api.java.JavaSparkContext
 
JavaSparkStatusTracker - Class in org.apache.spark.api.java
Low-level status reporting APIs for monitoring job and stage progress.
JavaStreamingContext - Class in org.apache.spark.streaming.api.java
A Java-friendly version of StreamingContext which is the main entry point for Spark Streaming functionality.
JavaStreamingContext(StreamingContext) - Constructor for class org.apache.spark.streaming.api.java.JavaStreamingContext
 
JavaStreamingContext(String, String, Duration) - Constructor for class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a StreamingContext.
JavaStreamingContext(String, String, Duration, String, String) - Constructor for class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a StreamingContext.
JavaStreamingContext(String, String, Duration, String, String[]) - Constructor for class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a StreamingContext.
JavaStreamingContext(String, String, Duration, String, String[], Map<String, String>) - Constructor for class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a StreamingContext.
JavaStreamingContext(JavaSparkContext, Duration) - Constructor for class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a JavaStreamingContext using an existing JavaSparkContext.
JavaStreamingContext(SparkConf, Duration) - Constructor for class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a JavaStreamingContext using a SparkConf configuration.
JavaStreamingContext(String) - Constructor for class org.apache.spark.streaming.api.java.JavaStreamingContext
Recreate a JavaStreamingContext from a checkpoint file.
JavaStreamingContext(String, Configuration) - Constructor for class org.apache.spark.streaming.api.java.JavaStreamingContext
Re-creates a JavaStreamingContext from a checkpoint file.
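A minimal sketch of the JavaStreamingContext(SparkConf, Duration) constructor listed above; the local master, app name, and 1-second batch interval are illustrative choices, and the input/transformation steps are left as a placeholder.

    import org.apache.spark.SparkConf;
    import org.apache.spark.streaming.Durations;
    import org.apache.spark.streaming.api.java.JavaStreamingContext;

    public class StreamingContextExample {
      public static void main(String[] args) throws Exception {
        SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("streaming-example");
        // One micro-batch every second.
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1));
        // ... define input DStreams and transformations here ...
        jssc.start();
        jssc.awaitTermination();
      }
    }
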
javaTopicAssignments() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
Java-friendly version of topicAssignments
javaTopicDistributions() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
Java-friendly version of topicDistributions
javaTopTopicsPerDocument(int) - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
Java-friendly version of topTopicsPerDocument
javaToPython() - Method in class org.apache.spark.sql.Dataset
Converts a JavaRDD to a PythonRDD.
JavaUtils - Class in org.apache.spark.api.java
 
JavaUtils() - Constructor for class org.apache.spark.api.java.JavaUtils
 
JavaUtils.SerializableMapWrapper<A,B> - Class in org.apache.spark.api.java
 
JavaUtils.SerializableMapWrapper(Map<A, B>) - Constructor for class org.apache.spark.api.java.JavaUtils.SerializableMapWrapper
 
jdbc(String, String, Properties) - Method in class org.apache.spark.sql.DataFrameReader
Construct a DataFrame representing the database table accessible via JDBC URL url named table and connection properties.
jdbc(String, String, String, long, long, int, Properties) - Method in class org.apache.spark.sql.DataFrameReader
Construct a DataFrame representing the database table accessible via JDBC URL url named table.
jdbc(String, String, String[], Properties) - Method in class org.apache.spark.sql.DataFrameReader
Construct a DataFrame representing the database table accessible via JDBC URL url named table using connection properties.
jdbc(String, String, Properties) - Method in class org.apache.spark.sql.DataFrameWriter
Saves the content of the DataFrame to an external database table via JDBC.
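A minimal sketch of the jdbc read and write methods above; the JDBC URL, table names, and credentials are placeholders rather than values taken from this index.

    import java.util.Properties;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class JdbcExample {
      public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("jdbc-example").getOrCreate();
        Properties props = new Properties();
        props.setProperty("user", "username");       // placeholder credentials
        props.setProperty("password", "secret");
        // Read a table over JDBC into a DataFrame.
        Dataset<Row> people = spark.read().jdbc("jdbc:postgresql://host/db", "people", props);
        // Write it back out to another table via JDBC.
        people.write().jdbc("jdbc:postgresql://host/db", "people_copy", props);
        spark.stop();
      }
    }
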
JdbcDialect - Class in org.apache.spark.sql.jdbc
:: DeveloperApi :: Encapsulates everything (extensions, workarounds, quirks) to handle the SQL dialect of a certain database or jdbc driver.
JdbcDialect() - Constructor for class org.apache.spark.sql.jdbc.JdbcDialect
 
JdbcDialects - Class in org.apache.spark.sql.jdbc
:: DeveloperApi :: Registry of dialects that apply to every new jdbc org.apache.spark.sql.DataFrame.
JdbcDialects() - Constructor for class org.apache.spark.sql.jdbc.JdbcDialects
 
jdbcNullType() - Method in class org.apache.spark.sql.jdbc.JdbcType
 
JdbcRDD<T> - Class in org.apache.spark.rdd
An RDD that executes an SQL query on a JDBC connection and reads results.
JdbcRDD(SparkContext, Function0<Connection>, String, long, long, int, Function1<ResultSet, T>, ClassTag<T>) - Constructor for class org.apache.spark.rdd.JdbcRDD
 
JdbcRDD.ConnectionFactory - Interface in org.apache.spark.rdd
 
JdbcType - Class in org.apache.spark.sql.jdbc
:: DeveloperApi :: A database type definition coupled with the jdbc type needed to send null values to the database.
JdbcType(String, int) - Constructor for class org.apache.spark.sql.jdbc.JdbcType
 
JettyUtils - Class in org.apache.spark.ui
Utilities for launching a web server using Jetty's HTTP Server class
JettyUtils() - Constructor for class org.apache.spark.ui.JettyUtils
 
JettyUtils.ServletParams<T> - Class in org.apache.spark.ui
 
JettyUtils.ServletParams(Function1<HttpServletRequest, T>, String, Function1<T, String>, Function1<T, Object>) - Constructor for class org.apache.spark.ui.JettyUtils.ServletParams
 
JettyUtils.ServletParams$ - Class in org.apache.spark.ui
 
JettyUtils.ServletParams$() - Constructor for class org.apache.spark.ui.JettyUtils.ServletParams$
 
JOB_DAG() - Static method in class org.apache.spark.ui.ToolTips
 
JOB_TIMELINE() - Static method in class org.apache.spark.ui.ToolTips
 
jobConfCacheKey() - Method in class org.apache.spark.rdd.HadoopRDD
 
JobData - Class in org.apache.spark.status.api.v1
 
jobEndFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
jobEndToJson(SparkListenerJobEnd) - Static method in class org.apache.spark.util.JsonProtocol
 
JobExecutionStatus - Enum in org.apache.spark
 
jobGroup() - Method in class org.apache.spark.status.api.v1.JobData
 
jobGroup() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
jobGroupToJobIds() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
jobId() - Method in class org.apache.spark.rdd.NewHadoopRDD
 
jobId() - Method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
jobId() - Method in class org.apache.spark.scheduler.SparkListenerJobStart
 
jobId() - Method in interface org.apache.spark.SparkJobInfo
 
jobId() - Method in class org.apache.spark.SparkJobInfoImpl
 
jobId() - Method in class org.apache.spark.status.api.v1.JobData
 
jobID() - Method in class org.apache.spark.TaskCommitDenied
 
jobId() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
jobIds() - Method in interface org.apache.spark.api.java.JavaFutureAction
Returns the job IDs run by the underlying async operation.
jobIds() - Method in class org.apache.spark.ComplexFutureAction
 
jobIds() - Method in interface org.apache.spark.FutureAction
Returns the job IDs run by the underlying async operation.
jobIds() - Method in class org.apache.spark.SimpleFutureAction
 
jobIdToData() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
JobProgressListener - Class in org.apache.spark.ui.jobs
:: DeveloperApi :: Tracks task-level information to be displayed in the UI.
JobProgressListener(SparkConf) - Constructor for class org.apache.spark.ui.jobs.JobProgressListener
 
JobResult - Interface in org.apache.spark.scheduler
:: DeveloperApi :: A result of a job in the DAGScheduler.
jobResult() - Method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
jobResultFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
jobResultToJson(JobResult) - Static method in class org.apache.spark.util.JsonProtocol
 
jobStartFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
jobStartToJson(SparkListenerJobStart) - Static method in class org.apache.spark.util.JsonProtocol
 
JobSubmitter - Interface in org.apache.spark
Handle via which a "run" function passed to a ComplexFutureAction can submit jobs for execution.
JobSucceeded - Class in org.apache.spark.scheduler
 
JobSucceeded() - Constructor for class org.apache.spark.scheduler.JobSucceeded
 
join(JavaPairRDD<K, W>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD containing all pairs of elements with matching keys in this and other.
join(JavaPairRDD<K, W>) - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD containing all pairs of elements with matching keys in this and other.
join(JavaPairRDD<K, W>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD containing all pairs of elements with matching keys in this and other.
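A minimal sketch of JavaPairRDD.join; the sample data and local master are made up for illustration.

    import java.util.Arrays;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import scala.Tuple2;

    public class JoinExample {
      public static void main(String[] args) {
        JavaSparkContext jsc = new JavaSparkContext("local[2]", "join-example");
        JavaPairRDD<String, Integer> left = jsc.parallelizePairs(
            Arrays.asList(new Tuple2<>("a", 1), new Tuple2<>("b", 2)));
        JavaPairRDD<String, String> right = jsc.parallelizePairs(
            Arrays.asList(new Tuple2<>("a", "x"), new Tuple2<>("c", "y")));
        // Keeps only keys present in both RDDs; here, the single key "a".
        JavaPairRDD<String, Tuple2<Integer, String>> joined = left.join(right);
        System.out.println(joined.collect());
        jsc.stop();
      }
    }
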
join(RDD<Tuple2<K, W>>, Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return an RDD containing all pairs of elements with matching keys in this and other.
join(RDD<Tuple2<K, W>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return an RDD containing all pairs of elements with matching keys in this and other.
join(RDD<Tuple2<K, W>>, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return an RDD containing all pairs of elements with matching keys in this and other.
join(Dataset<?>) - Method in class org.apache.spark.sql.Dataset
Cartesian join with another DataFrame.
join(Dataset<?>, String) - Method in class org.apache.spark.sql.Dataset
Inner equi-join with another DataFrame using the given column.
join(Dataset<?>, Seq<String>) - Method in class org.apache.spark.sql.Dataset
Inner equi-join with another DataFrame using the given columns.
join(Dataset<?>, Seq<String>, String) - Method in class org.apache.spark.sql.Dataset
Equi-join with another DataFrame using the given columns.
join(Dataset<?>, Column) - Method in class org.apache.spark.sql.Dataset
Inner join with another DataFrame, using the given join expression.
join(Dataset<?>, Column, String) - Method in class org.apache.spark.sql.Dataset
Join with another DataFrame, using the given join expression.
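A minimal sketch of the Dataset.join overloads above, using two tiny DataFrames built with range; the column name "id" and the local master are illustrative.

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class DatasetJoinExample {
      public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[2]").appName("dataset-join").getOrCreate();
        Dataset<Row> left = spark.range(0, 5).toDF("id");
        Dataset<Row> right = spark.range(3, 8).toDF("id");
        // Inner equi-join on the shared column; the result keeps ids 3 and 4.
        Dataset<Row> joined = left.join(right, "id");
        joined.show();
        spark.stop();
      }
    }
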
join(JavaPairDStream<K, W>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'join' between RDDs of this DStream and other DStream.
join(JavaPairDStream<K, W>, int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'join' between RDDs of this DStream and other DStream.
join(JavaPairDStream<K, W>, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'join' between RDDs of this DStream and other DStream.
join(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
join(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
join(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
join(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
join(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
join(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
join(DStream<Tuple2<K, W>>, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'join' between RDDs of this DStream and other DStream.
join(DStream<Tuple2<K, W>>, int, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'join' between RDDs of this DStream and other DStream.
join(DStream<Tuple2<K, W>>, Partitioner, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'join' between RDDs of this DStream and other DStream.
joinVertices(RDD<Tuple2<Object, U>>, Function3<Object, VD, U, VD>, ClassTag<U>) - Method in class org.apache.spark.graphx.GraphOps
Join the vertices with an RDD and then apply a function from the vertex and RDD entry to a new vertex value.
joinWith(Dataset<U>, Column, String) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: Joins this Dataset returning a Tuple2 for each pair where condition evaluates to true.
joinWith(Dataset<U>, Column) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: Using inner equi-join to join this Dataset returning a Tuple2 for each pair where condition evaluates to true.
json(String...) - Method in class org.apache.spark.sql.DataFrameReader
Loads a JSON file (one object per line) and returns the result as a DataFrame.
json(Seq<String>) - Method in class org.apache.spark.sql.DataFrameReader
Loads a JSON file (one object per line) and returns the result as a DataFrame.
json(JavaRDD<String>) - Method in class org.apache.spark.sql.DataFrameReader
Loads a JavaRDD[String] storing JSON objects (one object per record) and returns the result as a DataFrame.
json(RDD<String>) - Method in class org.apache.spark.sql.DataFrameReader
Loads an RDD[String] storing JSON objects (one object per record) and returns the result as a DataFrame.
json(String) - Method in class org.apache.spark.sql.DataFrameWriter
Saves the content of the DataFrame in JSON format at the specified path.
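A minimal sketch of DataFrameReader.json and DataFrameWriter.json; the input and output paths are placeholders.

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class JsonExample {
      public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("json-example").getOrCreate();
        // Each line of the input file is expected to hold one JSON object.
        Dataset<Row> df = spark.read().json("hdfs:///path/to/input.json");
        df.printSchema();
        // Writes the DataFrame back out, one JSON object per line.
        df.write().json("hdfs:///path/to/output");
        spark.stop();
      }
    }
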
json() - Static method in class org.apache.spark.sql.types.ArrayType
 
json() - Static method in class org.apache.spark.sql.types.BinaryType
 
json() - Static method in class org.apache.spark.sql.types.BooleanType
 
json() - Static method in class org.apache.spark.sql.types.ByteType
 
json() - Static method in class org.apache.spark.sql.types.CalendarIntervalType
 
json() - Method in class org.apache.spark.sql.types.DataType
The compact JSON representation of this data type.
json() - Static method in class org.apache.spark.sql.types.DateType
 
json() - Static method in class org.apache.spark.sql.types.DecimalType
 
json() - Static method in class org.apache.spark.sql.types.DoubleType
 
json() - Static method in class org.apache.spark.sql.types.FloatType
 
json() - Static method in class org.apache.spark.sql.types.IntegerType
 
json() - Static method in class org.apache.spark.sql.types.LongType
 
json() - Static method in class org.apache.spark.sql.types.MapType
 
json() - Method in class org.apache.spark.sql.types.Metadata
Converts to its JSON representation.
json() - Static method in class org.apache.spark.sql.types.NullType
 
json() - Static method in class org.apache.spark.sql.types.NumericType
 
json() - Static method in class org.apache.spark.sql.types.ShortType
 
json() - Static method in class org.apache.spark.sql.types.StringType
 
json() - Static method in class org.apache.spark.sql.types.StructType
 
json() - Static method in class org.apache.spark.sql.types.TimestampType
 
json_tuple(Column, String...) - Static method in class org.apache.spark.sql.functions
Creates a new row for a json column according to the given field names.
json_tuple(Column, Seq<String>) - Static method in class org.apache.spark.sql.functions
Creates a new row for a json column according to the given field names.
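A minimal sketch of functions.json_tuple; the column name "payload" and the fields "name" and "age" are assumptions about the caller's data, not values from this index.

    import static org.apache.spark.sql.functions.json_tuple;
    import java.util.Arrays;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class JsonTupleExample {
      public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[2]").appName("json-tuple").getOrCreate();
        Dataset<String> raw = spark.createDataset(
            Arrays.asList("{\"name\":\"alice\",\"age\":30}"), Encoders.STRING());
        Dataset<Row> df = raw.toDF("payload");
        // Produces one output column per requested JSON field.
        df.select(json_tuple(df.col("payload"), "name", "age")).show();
        spark.stop();
      }
    }
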
jsonDecode(String) - Method in class org.apache.spark.ml.param.BooleanParam
 
jsonDecode(String) - Method in class org.apache.spark.ml.param.DoubleArrayParam
 
jsonDecode(String) - Method in class org.apache.spark.ml.param.DoubleParam
 
jsonDecode(String) - Method in class org.apache.spark.ml.param.FloatParam
 
jsonDecode(String) - Method in class org.apache.spark.ml.param.IntArrayParam
 
jsonDecode(String) - Method in class org.apache.spark.ml.param.IntParam
 
jsonDecode(String) - Method in class org.apache.spark.ml.param.LongParam
 
jsonDecode(String) - Method in class org.apache.spark.ml.param.Param
Decodes a param value from JSON.
jsonDecode(String) - Method in class org.apache.spark.ml.param.StringArrayParam
 
jsonEncode(boolean) - Method in class org.apache.spark.ml.param.BooleanParam
 
jsonEncode(double[]) - Method in class org.apache.spark.ml.param.DoubleArrayParam
 
jsonEncode(double) - Method in class org.apache.spark.ml.param.DoubleParam
 
jsonEncode(float) - Method in class org.apache.spark.ml.param.FloatParam
 
jsonEncode(int[]) - Method in class org.apache.spark.ml.param.IntArrayParam
 
jsonEncode(int) - Method in class org.apache.spark.ml.param.IntParam
 
jsonEncode(long) - Method in class org.apache.spark.ml.param.LongParam
 
jsonEncode(T) - Method in class org.apache.spark.ml.param.Param
Encodes a param value into JSON, which can be decoded by jsonDecode().
jsonEncode(String[]) - Method in class org.apache.spark.ml.param.StringArrayParam
 
jsonFields() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
jsonOption(JsonAST.JValue) - Static method in class org.apache.spark.util.Utils
Return an option that translates JNothing to None
JsonProtocol - Class in org.apache.spark.util
Serializes SparkListener events to/from JSON.
JsonProtocol() - Constructor for class org.apache.spark.util.JsonProtocol
 
jsonResponderToServlet(Function1<HttpServletRequest, JsonAST.JValue>) - Static method in class org.apache.spark.ui.JettyUtils
 
JsonVectorConverter - Class in org.apache.spark.ml.linalg
 
JsonVectorConverter() - Constructor for class org.apache.spark.ml.linalg.JsonVectorConverter
 
jValueDecode(JsonAST.JValue) - Static method in class org.apache.spark.ml.param.DoubleParam
Decodes a param value from JValue.
jValueDecode(JsonAST.JValue) - Static method in class org.apache.spark.ml.param.FloatParam
Decodes a param value from JValue.
jValueEncode(double) - Static method in class org.apache.spark.ml.param.DoubleParam
Encodes a param value into JValue.
jValueEncode(float) - Static method in class org.apache.spark.ml.param.FloatParam
Encodes a param value into JValue.
JVM_GC_TIME() - Static method in class org.apache.spark.InternalAccumulator
 
jvmGcTime() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
jvmGcTime() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
jvmInformation() - Method in class org.apache.spark.ui.env.EnvironmentListener
 
JVMObjectTracker - Class in org.apache.spark.api.r
Helper singleton that tracks JVM objects returned to R.
JVMObjectTracker() - Constructor for class org.apache.spark.api.r.JVMObjectTracker
 

K

k() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
k() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
k() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
k() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
k() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
k() - Method in class org.apache.spark.ml.clustering.GaussianMixtureSummary
 
k() - Static method in class org.apache.spark.ml.clustering.KMeans
 
k() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
k() - Method in class org.apache.spark.ml.clustering.KMeansSummary
 
k() - Static method in class org.apache.spark.ml.clustering.LDA
 
k() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
k() - Static method in class org.apache.spark.ml.feature.PCA
 
k() - Static method in class org.apache.spark.ml.feature.PCAModel
 
k() - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
Number of leaf clusters.
k() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
 
k() - Method in class org.apache.spark.mllib.clustering.ExpectationSum
 
k() - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
Number of Gaussians in the mixture.
k() - Method in class org.apache.spark.mllib.clustering.KMeansModel
Total number of clusters.
k() - Method in class org.apache.spark.mllib.clustering.LDAModel
Number of topics
k() - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
k() - Method in class org.apache.spark.mllib.clustering.PowerIterationClusteringModel
 
k() - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
k() - Method in class org.apache.spark.mllib.feature.PCA
 
k() - Method in class org.apache.spark.mllib.feature.PCAModel
 
K_MEANS_PARALLEL() - Static method in class org.apache.spark.mllib.clustering.KMeans
 
KafkaCluster - Class in org.apache.spark.streaming.kafka
:: DeveloperApi :: Convenience methods for interacting with a Kafka cluster.
KafkaCluster(Map<String, String>) - Constructor for class org.apache.spark.streaming.kafka.KafkaCluster
 
KafkaCluster.LeaderOffset - Class in org.apache.spark.streaming.kafka
 
KafkaCluster.LeaderOffset(String, int, long) - Constructor for class org.apache.spark.streaming.kafka.KafkaCluster.LeaderOffset
 
KafkaCluster.LeaderOffset$ - Class in org.apache.spark.streaming.kafka
 
KafkaCluster.LeaderOffset$() - Constructor for class org.apache.spark.streaming.kafka.KafkaCluster.LeaderOffset$
 
KafkaCluster.SimpleConsumerConfig - Class in org.apache.spark.streaming.kafka
High-level Kafka consumers connect to ZK.
KafkaCluster.SimpleConsumerConfig$ - Class in org.apache.spark.streaming.kafka
 
KafkaCluster.SimpleConsumerConfig$() - Constructor for class org.apache.spark.streaming.kafka.KafkaCluster.SimpleConsumerConfig$
 
kafkaParams() - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
KafkaUtils - Class in org.apache.spark.streaming.kafka
 
KafkaUtils() - Constructor for class org.apache.spark.streaming.kafka.KafkaUtils
 
kClassTag() - Method in class org.apache.spark.api.java.JavaHadoopRDD
 
kClassTag() - Method in class org.apache.spark.api.java.JavaNewHadoopRDD
 
kClassTag() - Method in class org.apache.spark.api.java.JavaPairRDD
 
kClassTag() - Method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
kClassTag() - Method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
keepLastCheckpoint() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
keepLastCheckpoint() - Static method in class org.apache.spark.ml.clustering.LDA
 
keepLastCheckpoint() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
KernelDensity - Class in org.apache.spark.mllib.stat
Kernel density estimation.
KernelDensity() - Constructor for class org.apache.spark.mllib.stat.KernelDensity
 
keyAs(Encoder<L>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Returns a new KeyValueGroupedDataset where the type of the key has been mapped to the specified type.
keyBy(Function<T, U>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
keyBy(Function<T, U>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
keyBy(Function<T, U>) - Static method in class org.apache.spark.api.java.JavaRDD
 
keyBy(Function<T, U>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Creates tuples of the elements in this RDD by applying f.
keyBy(Function1<T, K>) - Static method in class org.apache.spark.api.r.RRDD
 
keyBy(Function1<T, K>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
keyBy(Function1<T, K>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
keyBy(Function1<T, K>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
keyBy(Function1<T, K>) - Static method in class org.apache.spark.graphx.VertexRDD
 
keyBy(Function1<T, K>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
keyBy(Function1<T, K>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
keyBy(Function1<T, K>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
keyBy(Function1<T, K>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
keyBy(Function1<T, K>) - Method in class org.apache.spark.rdd.RDD
Creates tuples of the elements in this RDD by applying f.
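A minimal sketch of JavaRDD.keyBy, which pairs each element with a computed key; the words and the first-letter key function are illustrative.

    import java.util.Arrays;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    public class KeyByExample {
      public static void main(String[] args) {
        JavaSparkContext jsc = new JavaSparkContext("local[2]", "key-by-example");
        JavaRDD<String> words = jsc.parallelize(Arrays.asList("spark", "scala", "java"));
        // Each word becomes (firstLetter, word).
        JavaPairRDD<Character, String> keyed = words.keyBy(w -> w.charAt(0));
        System.out.println(keyed.collect());
        jsc.stop();
      }
    }
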
keyOrdering() - Method in class org.apache.spark.ShuffleDependency
 
keys() - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD with the keys of each tuple.
keys() - Method in class org.apache.spark.rdd.PairRDDFunctions
Return an RDD with the keys of each tuple.
keys() - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Returns a Dataset that contains each unique key.
keyType() - Method in class org.apache.spark.sql.types.MapType
 
KeyValueGroupedDataset<K,V> - Class in org.apache.spark.sql
:: Experimental :: A Dataset that has been logically grouped by a user-specified grouping key.
kFold(RDD<T>, int, int, ClassTag<T>) - Static method in class org.apache.spark.mllib.util.MLUtils
Return a k-element array of RDD pairs, where the first element of each pair contains the training data (the complement of the validation data) and the second element contains the validation data, a unique 1/k of the input.
kFold(RDD<T>, int, long, ClassTag<T>) - Static method in class org.apache.spark.mllib.util.MLUtils
Version of kFold() taking a Long seed.
kill() - Method in interface org.apache.spark.launcher.SparkAppHandle
Tries to kill the underlying application.
KILLED() - Static method in class org.apache.spark.TaskState
 
killExecutor(String) - Method in class org.apache.spark.SparkContext
:: DeveloperApi :: Request that the cluster manager kill the specified executor.
killExecutors(Seq<String>) - Method in class org.apache.spark.SparkContext
:: DeveloperApi :: Request that the cluster manager kill the specified executors.
KillTask - Class in org.apache.spark.scheduler.local
 
KillTask(long, boolean) - Constructor for class org.apache.spark.scheduler.local.KillTask
 
KinesisUtils - Class in org.apache.spark.streaming.kinesis
 
KinesisUtils() - Constructor for class org.apache.spark.streaming.kinesis.KinesisUtils
 
KinesisUtilsPythonHelper - Class in org.apache.spark.streaming.kinesis
This is a helper class that wraps the methods in KinesisUtils in a more Python-friendly class and functions so that they can be easily instantiated and called from Python's KinesisUtils.
KinesisUtilsPythonHelper() - Constructor for class org.apache.spark.streaming.kinesis.KinesisUtilsPythonHelper
 
kManifest() - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
kManifest() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
kManifest() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
KMeans - Class in org.apache.spark.ml.clustering
:: Experimental :: K-means clustering with support for k-means|| initialization proposed by Bahmani et al.
KMeans(String) - Constructor for class org.apache.spark.ml.clustering.KMeans
 
KMeans() - Constructor for class org.apache.spark.ml.clustering.KMeans
 
KMeans - Class in org.apache.spark.mllib.clustering
K-means clustering with a k-means++ like initialization mode (the k-means|| algorithm by Bahmani et al).
KMeans() - Constructor for class org.apache.spark.mllib.clustering.KMeans
Constructs a KMeans instance with default parameters: {k: 2, maxIterations: 20, runs: 1, initializationMode: "k-means||", initializationSteps: 5, epsilon: 1e-4, seed: random}.
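A minimal sketch of fitting the org.apache.spark.ml.clustering.KMeans estimator listed above; the libsvm sample file path assumes the data files shipped with a Spark distribution are available in the working directory.

    import org.apache.spark.ml.clustering.KMeans;
    import org.apache.spark.ml.clustering.KMeansModel;
    import org.apache.spark.ml.linalg.Vector;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class KMeansFitExample {
      public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[2]").appName("kmeans-example").getOrCreate();
        // Sample data file shipped with Spark; adjust the path as needed.
        Dataset<Row> data = spark.read().format("libsvm").load("data/mllib/sample_kmeans_data.txt");
        // Fit a 2-cluster model with a fixed seed for reproducibility.
        KMeansModel model = new KMeans().setK(2).setSeed(1L).fit(data);
        for (Vector center : model.clusterCenters()) {
          System.out.println(center);
        }
        spark.stop();
      }
    }
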
KMeansDataGenerator - Class in org.apache.spark.mllib.util
:: DeveloperApi :: Generate test data for KMeans.
KMeansDataGenerator() - Constructor for class org.apache.spark.mllib.util.KMeansDataGenerator
 
KMeansModel - Class in org.apache.spark.ml.clustering
:: Experimental :: Model fitted by KMeans.
KMeansModel - Class in org.apache.spark.mllib.clustering
A clustering model for K-means.
KMeansModel(Vector[]) - Constructor for class org.apache.spark.mllib.clustering.KMeansModel
 
KMeansModel(Iterable<Vector>) - Constructor for class org.apache.spark.mllib.clustering.KMeansModel
A Java-friendly constructor that takes an Iterable of Vectors.
KMeansModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.clustering
 
KMeansModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.clustering.KMeansModel.SaveLoadV1_0$
 
kMeansPlusPlus(int, VectorWithNorm[], double[], int, int) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
Run K-means++ on the weighted point set points.
KMeansSummary - Class in org.apache.spark.ml.clustering
:: Experimental :: Summary of KMeans.
kolmogorovSmirnovTest(RDD<Object>, String, double...) - Static method in class org.apache.spark.mllib.stat.Statistics
Convenience function to conduct a one-sample, two-sided Kolmogorov-Smirnov test for probability distribution equality.
kolmogorovSmirnovTest(JavaDoubleRDD, String, double...) - Static method in class org.apache.spark.mllib.stat.Statistics
Java-friendly version of kolmogorovSmirnovTest()
kolmogorovSmirnovTest(RDD<Object>, Function1<Object, Object>) - Static method in class org.apache.spark.mllib.stat.Statistics
Conduct the two-sided Kolmogorov-Smirnov (KS) test for data sampled from a continuous distribution.
kolmogorovSmirnovTest(RDD<Object>, String, Seq<Object>) - Static method in class org.apache.spark.mllib.stat.Statistics
Convenience function to conduct a one-sample, two-sided Kolmogorov-Smirnov test for probability distribution equality.
kolmogorovSmirnovTest(JavaDoubleRDD, String, Seq<Object>) - Static method in class org.apache.spark.mllib.stat.Statistics
Java-friendly version of kolmogorovSmirnovTest()
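A minimal sketch of the Java-friendly kolmogorovSmirnovTest overload; the sample values are made up, and the test is run against a standard normal distribution ("norm", mean 0.0, standard deviation 1.0).

    import java.util.Arrays;
    import org.apache.spark.api.java.JavaDoubleRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.mllib.stat.Statistics;
    import org.apache.spark.mllib.stat.test.KolmogorovSmirnovTestResult;

    public class KsTestExample {
      public static void main(String[] args) {
        JavaSparkContext jsc = new JavaSparkContext("local[2]", "ks-test-example");
        JavaDoubleRDD data = jsc.parallelizeDoubles(Arrays.asList(0.1, 0.15, 0.2, 0.3, 0.25));
        // One-sample, two-sided KS test against N(0, 1).
        KolmogorovSmirnovTestResult result = Statistics.kolmogorovSmirnovTest(data, "norm", 0.0, 1.0);
        System.out.println(result);
        jsc.stop();
      }
    }
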
KolmogorovSmirnovTest - Class in org.apache.spark.mllib.stat.test
Conduct the two-sided Kolmogorov Smirnov (KS) test for data sampled from a continuous distribution.
KolmogorovSmirnovTest() - Constructor for class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
KolmogorovSmirnovTest.NullHypothesis$ - Class in org.apache.spark.mllib.stat.test
 
KolmogorovSmirnovTest.NullHypothesis$() - Constructor for class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest.NullHypothesis$
 
KolmogorovSmirnovTestResult - Class in org.apache.spark.mllib.stat.test
:: Experimental :: Object containing the test results for the Kolmogorov-Smirnov test.
kryo(ClassTag<T>) - Static method in class org.apache.spark.sql.Encoders
(Scala-specific) Creates an encoder that serializes objects of type T using Kryo.
kryo(Class<T>) - Static method in class org.apache.spark.sql.Encoders
Creates an encoder that serializes objects of type T using Kryo.
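A minimal sketch of Encoders.kryo, which serializes whole objects into a single opaque binary column; the Event bean is a made-up class for illustration.

    import java.io.Serializable;
    import java.util.Arrays;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.SparkSession;

    public class KryoEncoderExample {
      public static class Event implements Serializable {
        public String name;
        public Event() { }
        public Event(String name) { this.name = name; }
      }

      public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[2]").appName("kryo-example").getOrCreate();
        // Objects encoded with Kryo are stored as binary rather than as typed columns.
        Dataset<Event> events = spark.createDataset(
            Arrays.asList(new Event("start"), new Event("stop")), Encoders.kryo(Event.class));
        System.out.println(events.count());
        spark.stop();
      }
    }
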
KryoRegistrator - Interface in org.apache.spark.serializer
Interface implemented by clients to register their classes with Kryo when using Kryo serialization.
KryoSerializer - Class in org.apache.spark.serializer
A Spark serializer that uses the Kryo serialization library.
KryoSerializer(SparkConf) - Constructor for class org.apache.spark.serializer.KryoSerializer
 
kurtosis(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the kurtosis of the values in a group.
kurtosis(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the kurtosis of the values in a group.

L

L1Updater - Class in org.apache.spark.mllib.optimization
:: DeveloperApi :: Updater for L1 regularized problems.
L1Updater() - Constructor for class org.apache.spark.mllib.optimization.L1Updater
 
label() - Method in class org.apache.spark.ml.feature.LabeledPoint
 
label() - Method in class org.apache.spark.mllib.regression.LabeledPoint
 
labelCol() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
 
labelCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
labelCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
labelCol() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
labelCol() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
labelCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
labelCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
labelCol() - Method in interface org.apache.spark.ml.classification.LogisticRegressionSummary
Field in "predictions" which gives the true label of each instance (if available).
labelCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
labelCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
labelCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
labelCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
labelCol() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
labelCol() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
labelCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
labelCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
labelCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
labelCol() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
labelCol() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
labelCol() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
labelCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
labelCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
labelCol() - Static method in class org.apache.spark.ml.feature.RFormula
 
labelCol() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
labelCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
labelCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
labelCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
labelCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
labelCol() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
labelCol() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
labelCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
labelCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
labelCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
labelCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
labelCol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
labelCol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
labelCol() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
 
labelCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
labelCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
LabelConverter - Class in org.apache.spark.ml.classification
Label to vector converter.
LabelConverter() - Constructor for class org.apache.spark.ml.classification.LabelConverter
 
LabeledPoint - Class in org.apache.spark.ml.feature
Class that represents the features and labels of a data point.
LabeledPoint(double, Vector) - Constructor for class org.apache.spark.ml.feature.LabeledPoint
 
LabeledPoint - Class in org.apache.spark.mllib.regression
Class that represents the features and labels of a data point.
LabeledPoint(double, Vector) - Constructor for class org.apache.spark.mllib.regression.LabeledPoint
 
LabelPropagation - Class in org.apache.spark.graphx.lib
Label Propagation algorithm.
LabelPropagation() - Constructor for class org.apache.spark.graphx.lib.LabelPropagation
 
labels() - Method in class org.apache.spark.ml.feature.IndexToString
Optional param for array of labels specifying index-string mapping.
labels() - Method in class org.apache.spark.ml.feature.StringIndexerModel
 
labels() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
 
labels() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$.Data
 
labels() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$.Data
 
labels() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns the sequence of labels in ascending order
labels() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns the sequence of labels in ascending order
lag(Column, int) - Static method in class org.apache.spark.sql.functions
Window function: returns the value that is offset rows before the current row, and null if there is less than offset rows before the current row.
lag(String, int) - Static method in class org.apache.spark.sql.functions
Window function: returns the value that is offset rows before the current row, and null if there is less than offset rows before the current row.
lag(String, int, Object) - Static method in class org.apache.spark.sql.functions
Window function: returns the value that is offset rows before the current row, and defaultValue if there is less than offset rows before the current row.
lag(Column, int, Object) - Static method in class org.apache.spark.sql.functions
Window function: returns the value that is offset rows before the current row, and defaultValue if there is less than offset rows before the current row.
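A minimal sketch of the lag window function; the single "id" column and the window ordering are illustrative.

    import static org.apache.spark.sql.functions.col;
    import static org.apache.spark.sql.functions.lag;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;
    import org.apache.spark.sql.expressions.Window;
    import org.apache.spark.sql.expressions.WindowSpec;

    public class LagExample {
      public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[2]").appName("lag-example").getOrCreate();
        Dataset<Row> df = spark.range(0, 10).toDF("id");
        // For each row, fetch the "id" value one row earlier; the first row gets null.
        WindowSpec byId = Window.orderBy(col("id"));
        df.withColumn("prev_id", lag(col("id"), 1).over(byId)).show();
        spark.stop();
      }
    }
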
LassoModel - Class in org.apache.spark.mllib.regression
Regression model trained using Lasso.
LassoModel(Vector, double) - Constructor for class org.apache.spark.mllib.regression.LassoModel
 
LassoWithSGD - Class in org.apache.spark.mllib.regression
Deprecated.
Use ml.regression.LinearRegression with elasticNetParam = 1.0. Note the default regParam is 0.01 for LassoWithSGD, but is 0.0 for LinearRegression. Since 2.0.0.
LassoWithSGD() - Constructor for class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
Construct a Lasso object with default parameters: {stepSize: 1.0, numIterations: 100, regParam: 0.01, miniBatchFraction: 1.0}.
last(Column, boolean) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the last value in a group.
last(String, boolean) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the last value of the column in a group.
last(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the last value in a group.
last(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the last value of the column in a group.
last() - Static method in class org.apache.spark.sql.types.StructType
 
last_day(Column) - Static method in class org.apache.spark.sql.functions
Given a date column, returns the last day of the month which the given date belongs to.
lastDir() - Method in class org.apache.spark.mllib.optimization.NNLS.Workspace
 
lastError() - Method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
lastErrorMessage() - Method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
lastErrorTime() - Method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
lastIndexOf(B) - Static method in class org.apache.spark.sql.types.StructType
 
lastIndexOf(B, int) - Static method in class org.apache.spark.sql.types.StructType
 
lastIndexOfSlice(GenSeq<B>) - Static method in class org.apache.spark.sql.types.StructType
 
lastIndexOfSlice(GenSeq<B>, int) - Static method in class org.apache.spark.sql.types.StructType
 
lastIndexWhere(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
lastIndexWhere(Function1<A, Object>, int) - Static method in class org.apache.spark.sql.types.StructType
 
lastOption() - Static method in class org.apache.spark.sql.types.StructType
 
lastUpdated() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
latestModel() - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Return the latest model.
latestModel() - Method in class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
Return the latest model.
launch() - Method in class org.apache.spark.launcher.SparkLauncher
Launches a sub-process that will start the configured Spark application.
LAUNCHING() - Static method in class org.apache.spark.TaskState
 
launchTime() - Method in class org.apache.spark.scheduler.TaskInfo
 
launchTime() - Method in class org.apache.spark.status.api.v1.TaskData
 
layers() - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
layers() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
LBFGS - Class in org.apache.spark.mllib.optimization
:: DeveloperApi :: Class used to solve an optimization problem using Limited-memory BFGS.
LBFGS(Gradient, Updater) - Constructor for class org.apache.spark.mllib.optimization.LBFGS
 
LDA - Class in org.apache.spark.ml.clustering
:: Experimental ::
LDA(String) - Constructor for class org.apache.spark.ml.clustering.LDA
 
LDA() - Constructor for class org.apache.spark.ml.clustering.LDA
 
LDA - Class in org.apache.spark.mllib.clustering
Latent Dirichlet Allocation (LDA), a topic model designed for text documents.
LDA() - Constructor for class org.apache.spark.mllib.clustering.LDA
Constructs an LDA instance with default parameters.
LDAModel - Class in org.apache.spark.ml.clustering
:: Experimental :: Model fitted by LDA.
LDAModel - Class in org.apache.spark.mllib.clustering
Latent Dirichlet Allocation (LDA) model.
LDAOptimizer - Interface in org.apache.spark.mllib.clustering
:: DeveloperApi ::
LDAUtils - Class in org.apache.spark.mllib.clustering
Utility methods for LDA.
LDAUtils() - Constructor for class org.apache.spark.mllib.clustering.LDAUtils
 
lead(String, int) - Static method in class org.apache.spark.sql.functions
Window function: returns the value that is offset rows after the current row, and null if there are fewer than offset rows after the current row.
lead(Column, int) - Static method in class org.apache.spark.sql.functions
Window function: returns the value that is offset rows after the current row, and null if there are fewer than offset rows after the current row.
lead(String, int, Object) - Static method in class org.apache.spark.sql.functions
Window function: returns the value that is offset rows after the current row, and defaultValue if there are fewer than offset rows after the current row.
lead(Column, int, Object) - Static method in class org.apache.spark.sql.functions
Window function: returns the value that is offset rows after the current row, and defaultValue if there are fewer than offset rows after the current row.
LeafNode - Class in org.apache.spark.ml.tree
:: DeveloperApi :: Decision tree leaf node.
learningDecay() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
learningDecay() - Static method in class org.apache.spark.ml.clustering.LDA
 
learningDecay() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
learningOffset() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
learningOffset() - Static method in class org.apache.spark.ml.clustering.LDA
 
learningOffset() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
learningRate() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
least(Column...) - Static method in class org.apache.spark.sql.functions
Returns the least value of the list of values, skipping null values.
least(String, String...) - Static method in class org.apache.spark.sql.functions
Returns the least value of the list of column names, skipping null values.
least(Seq<Column>) - Static method in class org.apache.spark.sql.functions
Returns the least value of the list of values, skipping null values.
least(String, Seq<String>) - Static method in class org.apache.spark.sql.functions
Returns the least value of the list of column names, skipping null values.
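A small sketch of the least function above, assuming an existing SparkSession named spark; the DataFrame and column names are hypothetical:

    import org.apache.spark.sql.functions.{col, least}
    import spark.implicits._

    val df = Seq((3, 1, 2), (9, 7, 8)).toDF("a", "b", "c")
    // Row-wise minimum across the three columns, skipping nulls.
    df.withColumn("smallest", least(col("a"), col("b"), col("c"))).show()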
LeastSquaresAggregator - Class in org.apache.spark.ml.regression
LeastSquaresAggregator computes the gradient and loss for a least-squares loss function, as used in linear regression, for samples in sparse or dense vectors in an online fashion.
LeastSquaresAggregator(Vector, double, double, boolean, double[], double[]) - Constructor for class org.apache.spark.ml.regression.LeastSquaresAggregator
 
LeastSquaresCostFun - Class in org.apache.spark.ml.regression
LeastSquaresCostFun implements Breeze's DiffFunction[T] for Least Squares cost.
LeastSquaresCostFun(RDD<org.apache.spark.ml.feature.Instance>, double, double, boolean, boolean, double[], double[], double) - Constructor for class org.apache.spark.ml.regression.LeastSquaresCostFun
 
LeastSquaresGradient - Class in org.apache.spark.mllib.optimization
:: DeveloperApi :: Compute gradient and loss for a Least-squared loss function, as used in linear regression.
LeastSquaresGradient() - Constructor for class org.apache.spark.mllib.optimization.LeastSquaresGradient
 
left() - Method in class org.apache.spark.sql.sources.And
 
left() - Method in class org.apache.spark.sql.sources.Or
 
leftCategories() - Method in class org.apache.spark.ml.tree.CategoricalSplit
Get sorted categories which split to the left
leftCategoriesOrThreshold() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.SplitData
 
leftChild() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData
 
leftChild() - Method in class org.apache.spark.ml.tree.InternalNode
 
leftChildIndex(int) - Static method in class org.apache.spark.mllib.tree.model.Node
Return the index of the left child of this node.
leftImpurity() - Method in class org.apache.spark.mllib.tree.model.InformationGainStats
 
leftJoin(RDD<Tuple2<Object, VD2>>, Function3<Object, VD, Option<VD2>, VD3>, ClassTag<VD2>, ClassTag<VD3>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
leftJoin(RDD<Tuple2<Object, VD2>>, Function3<Object, VD, Option<VD2>, VD3>, ClassTag<VD2>, ClassTag<VD3>) - Method in class org.apache.spark.graphx.VertexRDD
Left joins this VertexRDD with an RDD containing vertex attribute pairs.
leftNode() - Method in class org.apache.spark.mllib.tree.model.Node
 
leftNodeId() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
leftOuterJoin(JavaPairRDD<K, W>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Perform a left outer join of this and other.
leftOuterJoin(JavaPairRDD<K, W>) - Method in class org.apache.spark.api.java.JavaPairRDD
Perform a left outer join of this and other.
leftOuterJoin(JavaPairRDD<K, W>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Perform a left outer join of this and other.
leftOuterJoin(RDD<Tuple2<K, W>>, Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
Perform a left outer join of this and other.
leftOuterJoin(RDD<Tuple2<K, W>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Perform a left outer join of this and other.
leftOuterJoin(RDD<Tuple2<K, W>>, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
Perform a left outer join of this and other.
leftOuterJoin(JavaPairDStream<K, W>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'left outer join' between RDDs of this DStream and other DStream.
leftOuterJoin(JavaPairDStream<K, W>, int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'left outer join' between RDDs of this DStream and other DStream.
leftOuterJoin(JavaPairDStream<K, W>, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'left outer join' between RDDs of this DStream and other DStream.
leftOuterJoin(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
leftOuterJoin(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
leftOuterJoin(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
leftOuterJoin(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
leftOuterJoin(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
leftOuterJoin(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
leftOuterJoin(DStream<Tuple2<K, W>>, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'left outer join' between RDDs of this DStream and other DStream.
leftOuterJoin(DStream<Tuple2<K, W>>, int, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'left outer join' between RDDs of this DStream and other DStream.
leftOuterJoin(DStream<Tuple2<K, W>>, Partitioner, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'left outer join' between RDDs of this DStream and other DStream.
leftPredict() - Method in class org.apache.spark.mllib.tree.model.InformationGainStats
 
leftZipJoin(VertexRDD<VD2>, Function3<Object, VD, Option<VD2>, VD3>, ClassTag<VD2>, ClassTag<VD3>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
leftZipJoin(VertexRDD<VD2>, Function3<Object, VD, Option<VD2>, VD3>, ClassTag<VD2>, ClassTag<VD3>) - Method in class org.apache.spark.graphx.VertexRDD
Left joins this RDD with another VertexRDD with the same index.
LegacyAccumulatorWrapper<R,T> - Class in org.apache.spark.util
 
LegacyAccumulatorWrapper(R, AccumulableParam<R, T>) - Constructor for class org.apache.spark.util.LegacyAccumulatorWrapper
 
length() - Method in class org.apache.spark.scheduler.SplitInfo
 
length(Column) - Static method in class org.apache.spark.sql.functions
Computes the length of a given string or binary column.
length() - Method in interface org.apache.spark.sql.Row
Number of elements in the Row.
length() - Method in class org.apache.spark.sql.types.StructType
 
lengthCompare(int) - Static method in class org.apache.spark.sql.types.StructType
 
leq(Object) - Method in class org.apache.spark.sql.Column
Less than or equal to.
less(Duration) - Method in class org.apache.spark.streaming.Duration
 
less(Time) - Method in class org.apache.spark.streaming.Time
 
lessEq(Duration) - Method in class org.apache.spark.streaming.Duration
 
lessEq(Time) - Method in class org.apache.spark.streaming.Time
 
LessThan - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to a value less than value.
LessThan(String, Object) - Constructor for class org.apache.spark.sql.sources.LessThan
 
LessThanOrEqual - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to a value less than or equal to value.
LessThanOrEqual(String, Object) - Constructor for class org.apache.spark.sql.sources.LessThanOrEqual
 
levenshtein(Column, Column) - Static method in class org.apache.spark.sql.functions
Computes the Levenshtein distance of the two given string columns.
libraryPathEnvName() - Static method in class org.apache.spark.util.Utils
Return the current system LD_LIBRARY_PATH name.
libraryPathEnvPrefix(Seq<String>) - Static method in class org.apache.spark.util.Utils
Return the prefix of a command that appends the given library paths to the system-specific library path environment variable.
lift() - Static method in class org.apache.spark.sql.types.StructType
 
like(String) - Method in class org.apache.spark.sql.Column
SQL like expression.
limit(int) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset by taking the first n rows.
line() - Method in exception org.apache.spark.sql.AnalysisException
 
LinearDataGenerator - Class in org.apache.spark.mllib.util
:: DeveloperApi :: Generate sample data used for Linear Data.
LinearDataGenerator() - Constructor for class org.apache.spark.mllib.util.LinearDataGenerator
 
LinearRegression - Class in org.apache.spark.ml.regression
:: Experimental :: Linear regression.
LinearRegression(String) - Constructor for class org.apache.spark.ml.regression.LinearRegression
 
LinearRegression() - Constructor for class org.apache.spark.ml.regression.LinearRegression
 
LinearRegressionModel - Class in org.apache.spark.ml.regression
:: Experimental :: Model produced by LinearRegression.
LinearRegressionModel - Class in org.apache.spark.mllib.regression
Regression model trained using LinearRegression.
LinearRegressionModel(Vector, double) - Constructor for class org.apache.spark.mllib.regression.LinearRegressionModel
 
LinearRegressionSummary - Class in org.apache.spark.ml.regression
:: Experimental :: Linear regression results evaluated on a dataset.
LinearRegressionTrainingSummary - Class in org.apache.spark.ml.regression
:: Experimental :: Linear regression training results.
LinearRegressionWithSGD - Class in org.apache.spark.mllib.regression
Deprecated.
Use ml.regression.LinearRegression or LBFGS. Since 2.0.0.
LinearRegressionWithSGD() - Constructor for class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
Construct a LinearRegression object with default parameters: {stepSize: 1.0, numIterations: 100, miniBatchFraction: 1.0}.
link(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.CLogLog$
 
link(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Identity$
 
link(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Inverse$
 
link() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
link(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Log$
 
link(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Logit$
 
link(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Probit$
 
link(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Sqrt$
 
link() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
linkObj() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
linkPredictionCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
linkPredictionCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
listAccumulator() - Method in class org.apache.spark.SparkContext
Create and register a list accumulator, which starts with an empty list and accumulates inputs by adding them into the inner list.
listAccumulator(String) - Method in class org.apache.spark.SparkContext
Create and register a list accumulator, which starts with an empty list and accumulates inputs by adding them into the inner list.
ListAccumulator<T> - Class in org.apache.spark.util
 
ListAccumulator() - Constructor for class org.apache.spark.util.ListAccumulator
 
listColumns(String) - Method in class org.apache.spark.sql.catalog.Catalog
Returns a list of columns for the given table in the current database.
listColumns(String, String) - Method in class org.apache.spark.sql.catalog.Catalog
Returns a list of columns for the given table in the specified database.
listColumns(String) - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns a list of columns for the given table in the current database.
listColumns(String, String) - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns a list of columns for the given table in the specified database.
listDatabases() - Method in class org.apache.spark.sql.catalog.Catalog
Returns a list of databases available across all sessions.
listDatabases() - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns a list of databases available across all sessions.
listener() - Method in class org.apache.spark.sql.SparkSession
 
listener() - Method in class org.apache.spark.sql.SQLContext
 
listenerManager() - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: An interface to register custom QueryExecutionListeners that listen for execution metrics.
listenerManager() - Method in class org.apache.spark.sql.SQLContext
An interface to register custom QueryExecutionListeners that listen for execution metrics.
listFunctions() - Method in class org.apache.spark.sql.catalog.Catalog
Returns a list of functions registered in the current database.
listFunctions(String) - Method in class org.apache.spark.sql.catalog.Catalog
Returns a list of functions registered in the specified database.
listFunctions() - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns a list of functions registered in the current database.
listFunctions(String) - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns a list of functions registered in the specified database.
listingTable(Seq<String>, Function1<T, Seq<Node>>, Iterable<T>, boolean, Option<String>, Seq<String>, boolean, boolean) - Static method in class org.apache.spark.ui.UIUtils
Returns an HTML table constructed by generating a row for each object in a sequence.
listOrcFiles(String, Configuration) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
listTables() - Method in class org.apache.spark.sql.catalog.Catalog
Returns a list of tables in the current database.
listTables(String) - Method in class org.apache.spark.sql.catalog.Catalog
Returns a list of tables in the specified database.
listTables() - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns a list of tables in the current database.
listTables(String) - Method in class org.apache.spark.sql.internal.CatalogImpl
Returns a list of tables in the specified database.
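A minimal sketch of the Catalog listing methods above; it assumes an existing SparkSession named spark, and "my_table" is a hypothetical table name:

    // Each listing call returns a Dataset, so results can be shown or filtered like any data.
    spark.catalog.listDatabases().show()
    spark.catalog.listTables("default").show()
    spark.catalog.listColumns("default", "my_table").show()   // "my_table" must already exist
    spark.catalog.listFunctions().show()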
lit(Object) - Static method in class org.apache.spark.sql.functions
Creates a Column of literal value.
literal(String) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
load(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
load(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
load(String) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
load(String) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
load(String) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
load(String) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
load(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
load(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
load(String) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
load(String) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
load(String) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
load(String) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
load(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
load(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
load(String) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
load(String) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
load(String) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
load(String) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
load(String) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
load(String) - Static method in class org.apache.spark.ml.clustering.KMeans
 
load(String) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
load(String) - Static method in class org.apache.spark.ml.clustering.LDA
 
load(String) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
load(String) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
load(String) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
load(String) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
load(String) - Static method in class org.apache.spark.ml.feature.Binarizer
 
load(String) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
load(String) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
load(String) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
load(String) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
load(String) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
load(String) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
load(String) - Static method in class org.apache.spark.ml.feature.DCT
 
load(String) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
load(String) - Static method in class org.apache.spark.ml.feature.HashingTF
 
load(String) - Static method in class org.apache.spark.ml.feature.IDF
 
load(String) - Static method in class org.apache.spark.ml.feature.IDFModel
 
load(String) - Static method in class org.apache.spark.ml.feature.IndexToString
 
load(String) - Static method in class org.apache.spark.ml.feature.Interaction
 
load(String) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
load(String) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
load(String) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
load(String) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
load(String) - Static method in class org.apache.spark.ml.feature.NGram
 
load(String) - Static method in class org.apache.spark.ml.feature.Normalizer
 
load(String) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
load(String) - Static method in class org.apache.spark.ml.feature.PCA
 
load(String) - Static method in class org.apache.spark.ml.feature.PCAModel
 
load(String) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
load(String) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
load(String) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
load(String) - Static method in class org.apache.spark.ml.feature.RFormula
 
load(String) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
load(String) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
load(String) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
load(String) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
load(String) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
load(String) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
load(String) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
load(String) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
load(String) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
load(String) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
load(String) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
load(String) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
load(String) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
load(String) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
load(String) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
load(String) - Static method in class org.apache.spark.ml.Pipeline
 
load(String, SparkContext, String) - Method in class org.apache.spark.ml.Pipeline.SharedReadWrite$
Load metadata and stages for a Pipeline or PipelineModel
load(String) - Static method in class org.apache.spark.ml.PipelineModel
 
load(String) - Static method in class org.apache.spark.ml.r.RWrappers
 
load(String) - Static method in class org.apache.spark.ml.recommendation.ALS
 
load(String) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
load(String) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
load(String) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
load(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
load(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
load(String) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
load(String) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
load(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
load(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
load(String) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
load(String) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
load(String) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
load(String) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
load(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
load(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
load(String) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
load(String) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
load(String) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
load(String) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
load(String) - Method in interface org.apache.spark.ml.util.MLReadable
Reads an ML instance from the input path, a shortcut of read.load(path).
load(String) - Method in class org.apache.spark.ml.util.MLReader
Loads the ML component from the input path.
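A hedged sketch of the load pattern defined by MLReadable and MLReader above; the path is hypothetical, and a fitted PipelineModel is assumed to have been saved there earlier:

    import org.apache.spark.ml.PipelineModel

    val path = "/tmp/my-pipeline-model"   // hypothetical location written earlier by model.save(path)
    // load(path) is a shortcut for read.load(path).
    val model = PipelineModel.load(path)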
load(SparkContext, String) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.classification.NaiveBayesModel
 
load(SparkContext, String) - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$
 
load(SparkContext, String) - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.classification.SVMModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
load(SparkContext, String, int) - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel.SaveLoadV1_0$
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.clustering.DistributedLDAModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.clustering.KMeansModel
 
load(SparkContext, String) - Method in class org.apache.spark.mllib.clustering.KMeansModel.SaveLoadV1_0$
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.clustering.PowerIterationClusteringModel
 
load(SparkContext, String) - Method in class org.apache.spark.mllib.clustering.PowerIterationClusteringModel.SaveLoadV1_0$
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.feature.ChiSqSelectorModel
 
load(SparkContext, String) - Method in class org.apache.spark.mllib.feature.ChiSqSelectorModel.SaveLoadV1_0$
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.feature.Word2VecModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.fpm.FPGrowthModel
 
load(SparkContext, String) - Method in class org.apache.spark.mllib.fpm.FPGrowthModel.SaveLoadV1_0$
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.fpm.PrefixSpanModel
 
load(SparkContext, String) - Method in class org.apache.spark.mllib.fpm.PrefixSpanModel.SaveLoadV1_0$
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
Load a model from the given path.
load(SparkContext, String) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel.SaveLoadV1_0$
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.regression.IsotonicRegressionModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.regression.LassoModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
 
load(SparkContext, String, String, int) - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
load(SparkContext, String) - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
load(SparkContext, String) - Method in interface org.apache.spark.mllib.util.Loader
Load a model from the given path.
load(String...) - Method in class org.apache.spark.sql.DataFrameReader
Loads input in as a DataFrame, for data sources that support multiple paths.
load() - Method in class org.apache.spark.sql.DataFrameReader
Loads input in as a DataFrame, for data sources that don't require a path.
load(String) - Method in class org.apache.spark.sql.DataFrameReader
Loads input in as a DataFrame, for data sources that require a path.
load(Seq<String>) - Method in class org.apache.spark.sql.DataFrameReader
Loads input in as a DataFrame, for data sources that support multiple paths.
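A brief sketch of the DataFrameReader.load variants above; it assumes an existing SparkSession named spark and hypothetical file paths:

    // Explicit format plus a single path.
    val events = spark.read.format("json").load("/data/events.json")
    // No format given: the configured default data source (parquet) is used.
    val metrics = spark.read.load("/data/metrics.parquet")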
loadData(SparkContext, String, String) - Method in class org.apache.spark.mllib.classification.impl.GLMClassificationModel.SaveLoadV1_0$
Helper method for loading GLM classification model data.
loadData(SparkContext, String, String, int) - Method in class org.apache.spark.mllib.regression.impl.GLMRegressionModel.SaveLoadV1_0$
Helper method for loading GLM regression model data.
loadDefaultSparkProperties(SparkConf, String) - Static method in class org.apache.spark.util.Utils
Load default Spark properties from the given file.
loadDefaultStopWords(String) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
Loads the default stop words for the given language.
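As a small example of loadDefaultStopWords above, the following configures a StopWordsRemover with the built-in English list; the input and output column names are hypothetical:

    import org.apache.spark.ml.feature.StopWordsRemover

    val remover = new StopWordsRemover()
      .setInputCol("raw_tokens")
      .setOutputCol("filtered_tokens")
      .setStopWords(StopWordsRemover.loadDefaultStopWords("english"))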
loadDF(SQLContext, String, Map<String, String>) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
loadDF(SQLContext, String, StructType, Map<String, String>) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
Loader<M extends Saveable> - Interface in org.apache.spark.mllib.util
:: DeveloperApi ::
loadImpl(String, SQLContext, String, String) - Static method in class org.apache.spark.ml.tree.EnsembleModelReadWrite
Helper method for loading a tree ensemble from disk.
loadImpl(Dataset<Row>, Item, ClassTag<Item>) - Method in class org.apache.spark.mllib.fpm.FPGrowthModel.SaveLoadV1_0$
 
loadImpl(Dataset<Row>, Item, ClassTag<Item>) - Method in class org.apache.spark.mllib.fpm.PrefixSpanModel.SaveLoadV1_0$
 
loadLabeledPoints(SparkContext, String, int) - Static method in class org.apache.spark.mllib.util.MLUtils
Loads labeled points saved using RDD[LabeledPoint].saveAsTextFile.
loadLabeledPoints(SparkContext, String) - Static method in class org.apache.spark.mllib.util.MLUtils
Loads labeled points saved using RDD[LabeledPoint].saveAsTextFile with the default number of partitions.
loadLibSVMFile(SparkContext, String, int, int) - Static method in class org.apache.spark.mllib.util.MLUtils
Loads labeled data in the LIBSVM format into an RDD[LabeledPoint].
loadLibSVMFile(SparkContext, String, int) - Static method in class org.apache.spark.mllib.util.MLUtils
Loads labeled data in the LIBSVM format into an RDD[LabeledPoint], with the default number of partitions.
loadLibSVMFile(SparkContext, String) - Static method in class org.apache.spark.mllib.util.MLUtils
Loads binary labeled data in the LIBSVM format into an RDD[LabeledPoint], with number of features determined automatically and the default number of partitions.
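A minimal sketch of loadLibSVMFile above; it assumes an existing SparkContext named sc and a LIBSVM-formatted file at the hypothetical path shown:

    import org.apache.spark.mllib.util.MLUtils

    // Each line "label index1:value1 index2:value2 ..." becomes one LabeledPoint.
    val data = MLUtils.loadLibSVMFile(sc, "data/sample_libsvm_data.txt")
    val Array(train, test) = data.randomSplit(Array(0.8, 0.2), seed = 42L)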
loadTreeNodes(String, org.apache.spark.ml.util.DefaultParamsReader.Metadata, SQLContext) - Static method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite
Load a decision tree from a file.
loadVectors(SparkContext, String, int) - Static method in class org.apache.spark.mllib.util.MLUtils
Loads vectors saved using RDD[Vector].saveAsTextFile.
loadVectors(SparkContext, String) - Static method in class org.apache.spark.mllib.util.MLUtils
Loads vectors saved using RDD[Vector].saveAsTextFile with the default number of partitions.
LOCAL_BLOCKS_FETCHED() - Method in class org.apache.spark.InternalAccumulator.shuffleRead$
 
LOCAL_BYTES_READ() - Method in class org.apache.spark.InternalAccumulator.shuffleRead$
 
LOCAL_CLUSTER_REGEX() - Static method in class org.apache.spark.SparkMasterRegex
 
LOCAL_N_FAILURES_REGEX() - Static method in class org.apache.spark.SparkMasterRegex
 
LOCAL_N_REGEX() - Static method in class org.apache.spark.SparkMasterRegex
 
localBlocksFetched() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetricDistributions
 
localBlocksFetched() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetrics
 
localBytesRead() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetrics
 
localCheckpoint() - Static method in class org.apache.spark.api.r.RRDD
 
localCheckpoint() - Static method in class org.apache.spark.graphx.EdgeRDD
 
localCheckpoint() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
localCheckpoint() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
localCheckpoint() - Static method in class org.apache.spark.graphx.VertexRDD
 
localCheckpoint() - Static method in class org.apache.spark.rdd.HadoopRDD
 
localCheckpoint() - Static method in class org.apache.spark.rdd.JdbcRDD
 
localCheckpoint() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
localCheckpoint() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
localCheckpoint() - Method in class org.apache.spark.rdd.RDD
Mark this RDD for local checkpointing using Spark's existing caching layer.
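A short sketch of RDD.localCheckpoint above; it assumes an existing SparkContext named sc:

    // Build an RDD with some lineage, then truncate that lineage using executor-local storage.
    val rdd = sc.parallelize(1 to 1000).map(_ * 2)
    rdd.localCheckpoint()   // faster than checkpoint(), but not fault tolerant
    rdd.count()             // the first action materializes the local checkpoint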
localHostName() - Static method in class org.apache.spark.util.Utils
Get the local machine's hostname.
localHostNameForURI() - Static method in class org.apache.spark.util.Utils
Get the local machine's URI.
localityAwareTasks() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RequestExecutors
 
LocalKMeans - Class in org.apache.spark.mllib.clustering
A utility object to run K-means locally.
LocalKMeans() - Constructor for class org.apache.spark.mllib.clustering.LocalKMeans
 
LocalLDAModel - Class in org.apache.spark.ml.clustering
:: Experimental ::
LocalLDAModel - Class in org.apache.spark.mllib.clustering
Local LDA model.
localProperties() - Method in class org.apache.spark.SparkContext
 
localSeqToDatasetHolder(Seq<T>, Encoder<T>) - Method in class org.apache.spark.sql.SQLImplicits
Creates a Dataset from a local Seq.
localSparkRPackagePath() - Static method in class org.apache.spark.api.r.RUtils
Get the SparkR package path in the local Spark distribution.
localValue() - Method in class org.apache.spark.Accumulable
Deprecated.
Get the current value of this accumulator from within a task.
localValue() - Static method in class org.apache.spark.Accumulator
Deprecated.
 
locate(String, Column) - Static method in class org.apache.spark.sql.functions
Locate the position of the first occurrence of substr.
locate(String, Column, int) - Static method in class org.apache.spark.sql.functions
Locate the position of the first occurrence of substr in a string column, after position pos.
location() - Method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
locationUri() - Method in class org.apache.spark.sql.catalog.Database
 
log() - Static method in class org.apache.spark.api.r.RRDD
 
log() - Static method in class org.apache.spark.graphx.EdgeRDD
 
log() - Static method in class org.apache.spark.graphx.GraphLoader
 
log() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
log() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
log() - Static method in class org.apache.spark.graphx.lib.PageRank
 
log() - Static method in class org.apache.spark.graphx.Pregel
 
log() - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
log() - Static method in class org.apache.spark.graphx.VertexRDD
 
log() - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
log() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
log() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
log() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
log() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
log() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
log() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
log() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
log() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
log() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
log() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
log() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
log() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
log() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
log() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
log() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
log() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
log() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
log() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
log() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
log() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
log() - Static method in class org.apache.spark.ml.clustering.KMeans
 
log() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
log() - Static method in class org.apache.spark.ml.clustering.LDA
 
log() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
log() - Static method in class org.apache.spark.ml.feature.Binarizer
 
log() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
log() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
log() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
log() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
log() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
log() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
log() - Static method in class org.apache.spark.ml.feature.DCT
 
log() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
log() - Static method in class org.apache.spark.ml.feature.HashingTF
 
log() - Static method in class org.apache.spark.ml.feature.IDF
 
log() - Static method in class org.apache.spark.ml.feature.IDFModel
 
log() - Static method in class org.apache.spark.ml.feature.IndexToString
 
log() - Static method in class org.apache.spark.ml.feature.Interaction
 
log() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
log() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
log() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
log() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
log() - Static method in class org.apache.spark.ml.feature.NGram
 
log() - Static method in class org.apache.spark.ml.feature.Normalizer
 
log() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
log() - Static method in class org.apache.spark.ml.feature.PCA
 
log() - Static method in class org.apache.spark.ml.feature.PCAModel
 
log() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
log() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
log() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
log() - Static method in class org.apache.spark.ml.feature.RFormula
 
log() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
log(Function0<Parsers.Parser<T>>, String) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
log() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
log() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
log() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
log() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
log() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
log() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
log() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
log() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
log() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
log() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
log() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
log() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
log() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
log() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
log() - Static method in class org.apache.spark.ml.Pipeline
 
log() - Static method in class org.apache.spark.ml.PipelineModel
 
log() - Static method in class org.apache.spark.ml.recommendation.ALS
 
log() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
log() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
log() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
log() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
log() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
log() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
log() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
log() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
log() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
log() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
log() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
log() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
log() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
log() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
log() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
log() - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
log() - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
log() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
log() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
log() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
log() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
log() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
log() - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
log() - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
log() - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
log() - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
log() - Static method in class org.apache.spark.mllib.clustering.KMeans
 
log() - Static method in class org.apache.spark.mllib.clustering.LDA
 
log() - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
log() - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
log() - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
log() - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
log() - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
log() - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
log() - Static method in class org.apache.spark.mllib.linalg.BLAS
 
log() - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
log() - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
log() - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
log() - Static method in class org.apache.spark.mllib.recommendation.ALS
 
log() - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
log() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
log() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
log() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
log() - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
log() - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
log() - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
log() - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
log() - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
log() - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
log() - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
log() - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
log() - Static method in class org.apache.spark.mllib.tree.model.Node
 
log() - Static method in class org.apache.spark.mllib.tree.RandomForest
 
log() - Static method in class org.apache.spark.mllib.util.DataValidators
 
log() - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
log() - Static method in class org.apache.spark.rdd.HadoopRDD
 
log() - Static method in class org.apache.spark.rdd.JdbcRDD
 
log() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
log() - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
log() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
log() - Static method in class org.apache.spark.rdd.RDD
 
log() - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
log() - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
log() - Static method in class org.apache.spark.scheduler.StatsReportListener
 
log() - Static method in class org.apache.spark.serializer.KryoSerializer
 
log() - Static method in class org.apache.spark.serializer.SerializationDebugger
 
log() - Static method in class org.apache.spark.SparkConf
 
log() - Static method in class org.apache.spark.SparkContext
 
log() - Static method in class org.apache.spark.SparkEnv
 
log() - Static method in class org.apache.spark.sql.Column
 
log(Column) - Static method in class org.apache.spark.sql.functions
Computes the natural logarithm of the given value.
log(String) - Static method in class org.apache.spark.sql.functions
Computes the natural logarithm of the given column.
log(double, Column) - Static method in class org.apache.spark.sql.functions
Returns the first argument-base logarithm of the second argument.
log(double, String) - Static method in class org.apache.spark.sql.functions
Returns the first argument-base logarithm of the second argument.
log() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
log() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
log() - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
log() - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
log() - Static method in class org.apache.spark.sql.SparkSession
 
log() - Static method in class org.apache.spark.sql.SQLContext
 
log() - Static method in class org.apache.spark.sql.types.UDTRegistration
 
log() - Static method in class org.apache.spark.storage.StorageUtils
 
log() - Static method in class org.apache.spark.streaming.CheckpointReader
 
log() - Static method in class org.apache.spark.streaming.dstream.DStream
 
log() - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
log() - Static method in class org.apache.spark.streaming.StreamingContext
 
log() - Static method in class org.apache.spark.streaming.util.RawTextSender
 
log() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
log() - Static method in class org.apache.spark.ui.JettyUtils
 
log() - Static method in class org.apache.spark.ui.UIUtils
 
log() - Static method in class org.apache.spark.util.ClosureCleaner
 
log() - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
log() - Static method in class org.apache.spark.util.ShutdownHookManager
 
log() - Static method in class org.apache.spark.util.SignalUtils
 
log() - Static method in class org.apache.spark.util.SizeEstimator
 
log() - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
log() - Static method in class org.apache.spark.util.Utils
 
log10(Column) - Static method in class org.apache.spark.sql.functions
Computes the logarithm of the given value in base 10.
log10(String) - Static method in class org.apache.spark.sql.functions
Computes the logarithm of the given value in base 10.
log1p(Column) - Static method in class org.apache.spark.sql.functions
Computes the natural logarithm of the given value plus one.
log1p(String) - Static method in class org.apache.spark.sql.functions
Computes the natural logarithm of the given column plus one.
log2(Column) - Static method in class org.apache.spark.sql.functions
Computes the logarithm of the given column in base 2.
log2(String) - Static method in class org.apache.spark.sql.functions
Computes the logarithm of the given value in base 2.
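A compact sketch of the logarithm functions indexed above, assuming an existing SparkSession named spark; the DataFrame and column name are hypothetical:

    import org.apache.spark.sql.functions.{col, log, log10, log1p, log2}
    import spark.implicits._

    val df = Seq(1.0, 2.0, 10.0).toDF("x")
    // Natural log, base-2.0 log via log(base, col), base-10 log, log(1 + x), and base-2 log.
    df.select(log("x"), log(2.0, col("x")), log10("x"), log1p("x"), log2("x")).show()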
logDebug(Function0<String>) - Static method in class org.apache.spark.api.r.RRDD
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.api.r.RRDD
 
logDebug(Function0<String>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logDebug(Function0<String>) - Static method in class org.apache.spark.graphx.GraphLoader
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.GraphLoader
 
logDebug(Function0<String>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logDebug(Function0<String>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logDebug(Function0<String>) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logDebug(Function0<String>) - Static method in class org.apache.spark.graphx.Pregel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.Pregel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logDebug(Function0<String>) - Static method in class org.apache.spark.graphx.VertexRDD
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.VertexRDD
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LDA
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LDA
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.DCT
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.DCT
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDF
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDF
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.Interaction
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Interaction
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.NGram
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.NGram
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCA
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCA
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormula
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormula
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.Pipeline
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.Pipeline
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.PipelineModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.PipelineModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logDebug(Function0<String>) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logDebug(Function0<String>) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logDebug(Function0<String>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logDebug(Function0<String>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logDebug(Function0<String>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logDebug(Function0<String>) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logDebug(Function0<String>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logDebug(Function0<String>) - Static method in class org.apache.spark.rdd.RDD
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.RDD
 
logDebug(Function0<String>) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logDebug(Function0<String>) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logDebug(Function0<String>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logDebug(Function0<String>) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logDebug(Function0<String>) - Static method in class org.apache.spark.SparkConf
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkConf
 
logDebug(Function0<String>) - Static method in class org.apache.spark.SparkContext
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkContext
 
logDebug(Function0<String>) - Static method in class org.apache.spark.SparkEnv
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkEnv
 
logDebug(Function0<String>) - Static method in class org.apache.spark.sql.Column
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.Column
 
logDebug(Function0<String>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logDebug(Function0<String>) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logDebug(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logDebug(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logDebug(Function0<String>) - Static method in class org.apache.spark.sql.SparkSession
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SparkSession
 
logDebug(Function0<String>) - Static method in class org.apache.spark.sql.SQLContext
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SQLContext
 
logDebug(Function0<String>) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logDebug(Function0<String>) - Static method in class org.apache.spark.storage.StorageUtils
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.storage.StorageUtils
 
logDebug(Function0<String>) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logDebug(Function0<String>) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logDebug(Function0<String>) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logDebug(Function0<String>) - Static method in class org.apache.spark.streaming.StreamingContext
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.StreamingContext
 
logDebug(Function0<String>) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logDebug(Function0<String>) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ui.JettyUtils
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.JettyUtils
 
logDebug(Function0<String>) - Static method in class org.apache.spark.ui.UIUtils
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.UIUtils
 
logDebug(Function0<String>) - Static method in class org.apache.spark.util.ClosureCleaner
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ClosureCleaner
 
logDebug(Function0<String>) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logDebug(Function0<String>) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logDebug(Function0<String>) - Static method in class org.apache.spark.util.SignalUtils
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SignalUtils
 
logDebug(Function0<String>) - Static method in class org.apache.spark.util.SizeEstimator
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SizeEstimator
 
logDebug(Function0<String>) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logDebug(Function0<String>) - Static method in class org.apache.spark.util.Utils
 
logDebug(Function0<String>, Throwable) - Static method in class org.apache.spark.util.Utils
 
logDeprecationWarning(String) - Static method in class org.apache.spark.SparkConf
Logs a warning message if the given config key is deprecated.
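As a minimal, hypothetical sketch of how this entry might be exercised from Java (the config key below is only an illustration, not necessarily a key that is actually deprecated):

    import org.apache.spark.SparkConf;

    public class DeprecationCheck {
        public static void main(String[] args) {
            // If the given key is on Spark's internal list of deprecated
            // configuration settings, a warning is written to the log;
            // otherwise the call is a no-op. The key name is hypothetical.
            SparkConf.logDeprecationWarning("spark.some.legacy.setting");
        }
    }
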
logError(Function0<String>) - Static method in class org.apache.spark.api.r.RRDD
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.api.r.RRDD
 
logError(Function0<String>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logError(Function0<String>) - Static method in class org.apache.spark.graphx.GraphLoader
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.GraphLoader
 
logError(Function0<String>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logError(Function0<String>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logError(Function0<String>) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logError(Function0<String>) - Static method in class org.apache.spark.graphx.Pregel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.Pregel
 
logError(Function0<String>) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logError(Function0<String>) - Static method in class org.apache.spark.graphx.VertexRDD
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.VertexRDD
 
logError(Function0<String>) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LDA
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LDA
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.DCT
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.DCT
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDF
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDF
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.Interaction
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Interaction
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.NGram
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.NGram
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCA
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCA
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormula
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormula
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.Pipeline
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.Pipeline
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.PipelineModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.PipelineModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logError(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logError(Function0<String>) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logError(Function0<String>) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logError(Function0<String>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logError(Function0<String>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logError(Function0<String>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logError(Function0<String>) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logError(Function0<String>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logError(Function0<String>) - Static method in class org.apache.spark.rdd.RDD
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.RDD
 
logError(Function0<String>) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logError(Function0<String>) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logError(Function0<String>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logError(Function0<String>) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logError(Function0<String>) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logError(Function0<String>) - Static method in class org.apache.spark.SparkConf
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkConf
 
logError(Function0<String>) - Static method in class org.apache.spark.SparkContext
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkContext
 
logError(Function0<String>) - Static method in class org.apache.spark.SparkEnv
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkEnv
 
logError(Function0<String>) - Static method in class org.apache.spark.sql.Column
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.Column
 
logError(Function0<String>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logError(Function0<String>) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logError(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logError(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logError(Function0<String>) - Static method in class org.apache.spark.sql.SparkSession
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SparkSession
 
logError(Function0<String>) - Static method in class org.apache.spark.sql.SQLContext
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SQLContext
 
logError(Function0<String>) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logError(Function0<String>) - Static method in class org.apache.spark.storage.StorageUtils
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.storage.StorageUtils
 
logError(Function0<String>) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logError(Function0<String>) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logError(Function0<String>) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logError(Function0<String>) - Static method in class org.apache.spark.streaming.StreamingContext
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.StreamingContext
 
logError(Function0<String>) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logError(Function0<String>) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logError(Function0<String>) - Static method in class org.apache.spark.ui.JettyUtils
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.JettyUtils
 
logError(Function0<String>) - Static method in class org.apache.spark.ui.UIUtils
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.UIUtils
 
logError(Function0<String>) - Static method in class org.apache.spark.util.ClosureCleaner
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ClosureCleaner
 
logError(Function0<String>) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logError(Function0<String>) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logError(Function0<String>) - Static method in class org.apache.spark.util.SignalUtils
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SignalUtils
 
logError(Function0<String>) - Static method in class org.apache.spark.util.SizeEstimator
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SizeEstimator
 
logError(Function0<String>) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logError(Function0<String>) - Static method in class org.apache.spark.util.Utils
 
logError(Function0<String>, Throwable) - Static method in class org.apache.spark.util.Utils
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerApplicationEnd
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerBlockUpdated
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
 
logEvent() - Method in interface org.apache.spark.scheduler.SparkListenerEvent
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerJobStart
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerStageCompleted
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerTaskGettingResult
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
logEvent() - Static method in class org.apache.spark.scheduler.SparkListenerUnpersistRDD
 
logicalPlan() - Method in class org.apache.spark.sql.Dataset
 
logInfo(Function0<String>) - Static method in class org.apache.spark.api.r.RRDD
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.api.r.RRDD
 
logInfo(Function0<String>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logInfo(Function0<String>) - Static method in class org.apache.spark.graphx.GraphLoader
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.GraphLoader
 
logInfo(Function0<String>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logInfo(Function0<String>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logInfo(Function0<String>) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logInfo(Function0<String>) - Static method in class org.apache.spark.graphx.Pregel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.Pregel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logInfo(Function0<String>) - Static method in class org.apache.spark.graphx.VertexRDD
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.VertexRDD
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LDA
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LDA
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.DCT
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.DCT
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDF
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDF
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.Interaction
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Interaction
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.NGram
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.NGram
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCA
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCA
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormula
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormula
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.Pipeline
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.Pipeline
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.PipelineModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.PipelineModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logInfo(Function0<String>) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logInfo(Function0<String>) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logInfo(Function0<String>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logInfo(Function0<String>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logInfo(Function0<String>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logInfo(Function0<String>) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logInfo(Function0<String>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logInfo(Function0<String>) - Static method in class org.apache.spark.rdd.RDD
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.RDD
 
logInfo(Function0<String>) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logInfo(Function0<String>) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logInfo(Function0<String>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logInfo(Function0<String>) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logInfo(Function0<String>) - Static method in class org.apache.spark.SparkConf
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkConf
 
logInfo(Function0<String>) - Static method in class org.apache.spark.SparkContext
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkContext
 
logInfo(Function0<String>) - Static method in class org.apache.spark.SparkEnv
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkEnv
 
logInfo(Function0<String>) - Static method in class org.apache.spark.sql.Column
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.Column
 
logInfo(Function0<String>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logInfo(Function0<String>) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logInfo(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logInfo(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logInfo(Function0<String>) - Static method in class org.apache.spark.sql.SparkSession
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SparkSession
 
logInfo(Function0<String>) - Static method in class org.apache.spark.sql.SQLContext
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SQLContext
 
logInfo(Function0<String>) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logInfo(Function0<String>) - Static method in class org.apache.spark.storage.StorageUtils
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.storage.StorageUtils
 
logInfo(Function0<String>) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logInfo(Function0<String>) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logInfo(Function0<String>) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logInfo(Function0<String>) - Static method in class org.apache.spark.streaming.StreamingContext
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.StreamingContext
 
logInfo(Function0<String>) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logInfo(Function0<String>) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ui.JettyUtils
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.JettyUtils
 
logInfo(Function0<String>) - Static method in class org.apache.spark.ui.UIUtils
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.UIUtils
 
logInfo(Function0<String>) - Static method in class org.apache.spark.util.ClosureCleaner
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ClosureCleaner
 
logInfo(Function0<String>) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logInfo(Function0<String>) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logInfo(Function0<String>) - Static method in class org.apache.spark.util.SignalUtils
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SignalUtils
 
logInfo(Function0<String>) - Static method in class org.apache.spark.util.SizeEstimator
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SizeEstimator
 
logInfo(Function0<String>) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logInfo(Function0<String>) - Static method in class org.apache.spark.util.Utils
 
logInfo(Function0<String>, Throwable) - Static method in class org.apache.spark.util.Utils
 
LogisticAggregator - Class in org.apache.spark.ml.classification
LogisticAggregator computes the gradient and loss for the binary logistic loss function, as used in binary classification, for instances in sparse or dense vectors in an online fashion.
LogisticAggregator(Vector, int, boolean, double[], double[]) - Constructor for class org.apache.spark.ml.classification.LogisticAggregator
 
LogisticCostFun - Class in org.apache.spark.ml.classification
LogisticCostFun implements Breeze's DiffFunction[T] for a multinomial logistic loss function, as used in multi-class classification (it is also used in binary logistic regression).
LogisticCostFun(RDD<org.apache.spark.ml.feature.Instance>, int, boolean, boolean, double[], double[], double) - Constructor for class org.apache.spark.ml.classification.LogisticCostFun
 
LogisticGradient - Class in org.apache.spark.mllib.optimization
:: DeveloperApi :: Compute gradient and loss for a multinomial logistic loss function, as used in multi-class classification (it is also used in binary logistic regression).
LogisticGradient(int) - Constructor for class org.apache.spark.mllib.optimization.LogisticGradient
 
LogisticGradient() - Constructor for class org.apache.spark.mllib.optimization.LogisticGradient
 
LogisticRegression - Class in org.apache.spark.ml.classification
:: Experimental :: Logistic regression.
LogisticRegression(String) - Constructor for class org.apache.spark.ml.classification.LogisticRegression
 
LogisticRegression() - Constructor for class org.apache.spark.ml.classification.LogisticRegression
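The LogisticRegression entries above refer to the org.apache.spark.ml.classification estimator. A minimal Scala sketch of configuring and fitting it, assuming an existing SparkSession named spark (the variable names and toy data are illustrative, not part of this index):

import org.apache.spark.ml.classification.LogisticRegression
import org.apache.spark.ml.linalg.Vectors

// Toy training data: a "label" column and a "features" vector column.
val training = spark.createDataFrame(Seq(
  (1.0, Vectors.dense(0.0, 1.1, 0.1)),
  (0.0, Vectors.dense(2.0, 1.0, -1.0)),
  (0.0, Vectors.dense(2.0, 1.3, 1.0)),
  (1.0, Vectors.dense(0.0, 1.2, -0.5))
)).toDF("label", "features")

// Configure the estimator through its params, then fit to obtain a LogisticRegressionModel.
val lr = new LogisticRegression().setMaxIter(10).setRegParam(0.01)
val lrModel = lr.fit(training)
println(s"Coefficients: ${lrModel.coefficients}  Intercept: ${lrModel.intercept}")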
 
LogisticRegressionDataGenerator - Class in org.apache.spark.mllib.util
:: DeveloperApi :: Generate test data for LogisticRegression.
LogisticRegressionDataGenerator() - Constructor for class org.apache.spark.mllib.util.LogisticRegressionDataGenerator
 
LogisticRegressionModel - Class in org.apache.spark.ml.classification
:: Experimental :: Model produced by LogisticRegression.
LogisticRegressionModel - Class in org.apache.spark.mllib.classification
Classification model trained using Multinomial/Binary Logistic Regression.
LogisticRegressionModel(Vector, double, int, int) - Constructor for class org.apache.spark.mllib.classification.LogisticRegressionModel
 
LogisticRegressionModel(Vector, double) - Constructor for class org.apache.spark.mllib.classification.LogisticRegressionModel
Constructs a LogisticRegressionModel with weights and intercept for binary classification.
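As a hedged sketch of the (Vector, double) constructor listed above, a binary mllib LogisticRegressionModel can be built directly from precomputed weights and an intercept and then used to score a single point (the weight values here are made up for illustration):

import org.apache.spark.mllib.classification.{LogisticRegressionModel => MLlibLRModel}
import org.apache.spark.mllib.linalg.Vectors

// Weights and intercept would normally come from a trained model or an external source.
val binaryModel = new MLlibLRModel(Vectors.dense(0.5, -0.25), 0.1)
// predict(Vector) returns the predicted class label (0.0 or 1.0).
val label = binaryModel.predict(Vectors.dense(1.0, 2.0))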
LogisticRegressionSummary - Interface in org.apache.spark.ml.classification
Abstraction for Logistic Regression Results for a given model.
LogisticRegressionTrainingSummary - Interface in org.apache.spark.ml.classification
Abstraction for multinomial Logistic Regression Training results.
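Both summary interfaces are reached from a fitted ml LogisticRegressionModel. A brief sketch, assuming the lrModel fitted in the earlier LogisticRegression example was trained on binary data:

import org.apache.spark.ml.classification.BinaryLogisticRegressionSummary

val trainingSummary = lrModel.summary
// Objective (loss) value per training iteration.
println(trainingSummary.objectiveHistory.mkString(", "))
// For binary problems the summary can be downcast to get ROC-based metrics.
val binarySummary = trainingSummary.asInstanceOf[BinaryLogisticRegressionSummary]
println(s"Area under ROC: ${binarySummary.areaUnderROC}")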
LogisticRegressionWithLBFGS - Class in org.apache.spark.mllib.classification
Train a classification model for Multinomial/Binary Logistic Regression using Limited-memory BFGS.
LogisticRegressionWithLBFGS() - Constructor for class org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
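A minimal sketch of training with LogisticRegressionWithLBFGS on an RDD of LabeledPoint, assuming an existing SparkContext named sc (the toy data is illustrative):

import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint

// Tiny labeled dataset; in practice this would come from a real data source.
val data = sc.parallelize(Seq(
  LabeledPoint(1.0, Vectors.dense(0.0, 1.1)),
  LabeledPoint(0.0, Vectors.dense(2.0, 1.0)),
  LabeledPoint(1.0, Vectors.dense(0.1, 1.3))
))

// run() trains the model and returns a mllib LogisticRegressionModel.
val lbfgsModel = new LogisticRegressionWithLBFGS()
  .setNumClasses(2)
  .run(data)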
 
LogisticRegressionWithSGD - Class in org.apache.spark.mllib.classification
Deprecated.
Use ml.classification.LogisticRegression or LogisticRegressionWithLBFGS. Since 2.0.0.
LogisticRegressionWithSGD() - Constructor for class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
Construct a LogisticRegressionWithSGD object with default parameters: {stepSize: 1.0, numIterations: 100, regParam: 0.01, miniBatchFraction: 1.0}.
logLikelihood(Dataset<?>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logLikelihood(Dataset<?>) - Method in class org.apache.spark.ml.clustering.LDAModel
Calculates a lower bound on the log likelihood of the entire corpus.
logLikelihood(Dataset<?>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logLikelihood() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
Log likelihood of the observed tokens in the training set, given the current parameter estimates: log P(docs | topics, topic distributions for docs, alpha, eta)
logLikelihood() - Method in class org.apache.spark.mllib.clustering.ExpectationSum
 
logLikelihood(RDD<Tuple2<Object, Vector>>) - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
Calculates a lower bound on the log likelihood of the entire corpus.
logLikelihood(JavaPairRDD<Long, Vector>) - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
Java-friendly version of logLikelihood
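The logLikelihood entries above belong to the ml and mllib LDA model classes. A hedged sketch using the ml API, assuming a DataFrame named dataset that already has a "features" column of term-count vectors:

import org.apache.spark.ml.clustering.LDA

val lda = new LDA().setK(3).setMaxIter(10)
val ldaModel = lda.fit(dataset)
// Lower bound on the log likelihood of the whole corpus under the fitted model.
val corpusLogLikelihood = ldaModel.logLikelihood(dataset)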
LogLoss - Class in org.apache.spark.mllib.tree.loss
:: DeveloperApi :: Class for log loss calculation (for classification).
LogLoss() - Constructor for class org.apache.spark.mllib.tree.loss.LogLoss
 
logName() - Static method in class org.apache.spark.api.r.RRDD
 
logName() - Static method in class org.apache.spark.graphx.EdgeRDD
 
logName() - Static method in class org.apache.spark.graphx.GraphLoader
 
logName() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logName() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logName() - Static method in class org.apache.spark.graphx.lib.PageRank
 
logName() - Static method in class org.apache.spark.graphx.Pregel
 
logName() - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logName() - Static method in class org.apache.spark.graphx.VertexRDD
 
logName() - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logName() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logName() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logName() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logName() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logName() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logName() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logName() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logName() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logName() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logName() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logName() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logName() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logName() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logName() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logName() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logName() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logName() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logName() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logName() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logName() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logName() - Static method in class org.apache.spark.ml.clustering.KMeans
 
logName() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logName() - Static method in class org.apache.spark.ml.clustering.LDA
 
logName() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logName() - Static method in class org.apache.spark.ml.feature.Binarizer
 
logName() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logName() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logName() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logName() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logName() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logName() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logName() - Static method in class org.apache.spark.ml.feature.DCT
 
logName() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logName() - Static method in class org.apache.spark.ml.feature.HashingTF
 
logName() - Static method in class org.apache.spark.ml.feature.IDF
 
logName() - Static method in class org.apache.spark.ml.feature.IDFModel
 
logName() - Static method in class org.apache.spark.ml.feature.IndexToString
 
logName() - Static method in class org.apache.spark.ml.feature.Interaction
 
logName() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logName() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logName() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logName() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logName() - Static method in class org.apache.spark.ml.feature.NGram
 
logName() - Static method in class org.apache.spark.ml.feature.Normalizer
 
logName() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logName() - Static method in class org.apache.spark.ml.feature.PCA
 
logName() - Static method in class org.apache.spark.ml.feature.PCAModel
 
logName() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logName() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logName() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logName() - Static method in class org.apache.spark.ml.feature.RFormula
 
logName() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logName() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logName() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logName() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logName() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logName() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logName() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logName() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logName() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logName() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logName() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logName() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logName() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logName() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logName() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logName() - Static method in class org.apache.spark.ml.Pipeline
 
logName() - Static method in class org.apache.spark.ml.PipelineModel
 
logName() - Static method in class org.apache.spark.ml.recommendation.ALS
 
logName() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logName() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logName() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logName() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logName() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logName() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logName() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logName() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logName() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logName() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logName() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logName() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logName() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logName() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logName() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logName() - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logName() - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logName() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logName() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logName() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logName() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logName() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logName() - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logName() - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logName() - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logName() - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logName() - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logName() - Static method in class org.apache.spark.mllib.clustering.LDA
 
logName() - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logName() - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logName() - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logName() - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logName() - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logName() - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logName() - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logName() - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logName() - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logName() - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logName() - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logName() - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logName() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logName() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logName() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logName() - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logName() - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logName() - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logName() - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logName() - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logName() - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logName() - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logName() - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logName() - Static method in class org.apache.spark.mllib.tree.model.Node
 
logName() - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logName() - Static method in class org.apache.spark.mllib.util.DataValidators
 
logName() - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logName() - Static method in class org.apache.spark.rdd.HadoopRDD
 
logName() - Static method in class org.apache.spark.rdd.JdbcRDD
 
logName() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logName() - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logName() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logName() - Static method in class org.apache.spark.rdd.RDD
 
logName() - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logName() - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logName() - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logName() - Static method in class org.apache.spark.serializer.KryoSerializer
 
logName() - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logName() - Static method in class org.apache.spark.SparkConf
 
logName() - Static method in class org.apache.spark.SparkContext
 
logName() - Static method in class org.apache.spark.SparkEnv
 
logName() - Static method in class org.apache.spark.sql.Column
 
logName() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logName() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logName() - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logName() - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logName() - Static method in class org.apache.spark.sql.SparkSession
 
logName() - Static method in class org.apache.spark.sql.SQLContext
 
logName() - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logName() - Static method in class org.apache.spark.storage.StorageUtils
 
logName() - Static method in class org.apache.spark.streaming.CheckpointReader
 
logName() - Static method in class org.apache.spark.streaming.dstream.DStream
 
logName() - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logName() - Static method in class org.apache.spark.streaming.StreamingContext
 
logName() - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logName() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logName() - Static method in class org.apache.spark.ui.JettyUtils
 
logName() - Static method in class org.apache.spark.ui.UIUtils
 
logName() - Static method in class org.apache.spark.util.ClosureCleaner
 
logName() - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logName() - Static method in class org.apache.spark.util.ShutdownHookManager
 
logName() - Static method in class org.apache.spark.util.SignalUtils
 
logName() - Static method in class org.apache.spark.util.SizeEstimator
 
logName() - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logName() - Static method in class org.apache.spark.util.Utils
 
LogNormalGenerator - Class in org.apache.spark.mllib.random
:: DeveloperApi :: Generates i.i.d. samples from the log normal distribution with the given mean and standard deviation.
LogNormalGenerator(double, double) - Constructor for class org.apache.spark.mllib.random.LogNormalGenerator
 
logNormalGraph(SparkContext, int, int, double, double, long) - Static method in class org.apache.spark.graphx.util.GraphGenerators
Generate a graph whose vertex out degree distribution is log normal.
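A minimal, hypothetical Java sketch of this generator follows (the class name, master setting, and parameter values are assumptions, not taken from this index); the Scala result type Graph[Long, Int] surfaces as Graph<Object, Object> through the Java API.

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.graphx.Graph;
import org.apache.spark.graphx.util.GraphGenerators;

public class LogNormalGraphSketch {
  public static void main(String[] args) {
    // Local context for illustration only.
    SparkContext sc = new SparkContext(
        new SparkConf().setAppName("logNormalGraph").setMaster("local[2]"));
    // 100 vertices, default edge partitioning (0), mu = 4.0, sigma = 1.3, fixed seed.
    Graph<Object, Object> graph = GraphGenerators.logNormalGraph(sc, 100, 0, 4.0, 1.3, 12345L);
    // Each vertex attribute holds its sampled out degree.
    System.out.println("edges generated: " + graph.edges().count());
    sc.stop();
  }
}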
logNormalJavaRDD(JavaSparkContext, double, double, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
logNormalJavaRDD(JavaSparkContext, double, double, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
logNormalJavaRDD(JavaSparkContext, double, double, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
logNormalJavaVectorRDD(JavaSparkContext, double, double, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
logNormalJavaVectorRDD(JavaSparkContext, double, double, long, int, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
logNormalJavaVectorRDD(JavaSparkContext, double, double, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
logNormalRDD(SparkContext, double, double, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD comprised of i.i.d. samples from the log normal distribution with the input mean and standard deviation.
logNormalVectorRDD(SparkContext, double, double, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD[Vector] with vectors containing i.i.d. samples drawn from a log normal distribution.
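A minimal, hypothetical sketch of the Java-friendly variant above (the class name, master setting, and sample parameters are assumptions):

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaDoubleRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.random.RandomRDDs;

public class LogNormalRDDSketch {
  public static void main(String[] args) {
    JavaSparkContext jsc = new JavaSparkContext(
        new SparkConf().setAppName("logNormalJavaRDD").setMaster("local[2]"));
    // mean = 0.0, std = 1.0, size = 10000 samples (parameter values are illustrative).
    JavaDoubleRDD samples = RandomRDDs.logNormalJavaRDD(jsc, 0.0, 1.0, 10000L);
    System.out.println("sample mean: " + samples.mean());
    jsc.stop();
  }
}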
logpdf(Vector) - Method in class org.apache.spark.ml.stat.distribution.MultivariateGaussian
Returns the log-density of this multivariate Gaussian at the given point x.
logpdf(Vector) - Method in class org.apache.spark.mllib.stat.distribution.MultivariateGaussian
Returns the log-density of this multivariate Gaussian at the given point x.
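A minimal, hypothetical sketch of evaluating the log-density with the mllib distribution class (the mean vector, covariance matrix, and query point are assumptions):

import org.apache.spark.mllib.linalg.Matrices;
import org.apache.spark.mllib.linalg.Vectors;
import org.apache.spark.mllib.stat.distribution.MultivariateGaussian;

public class LogPdfSketch {
  public static void main(String[] args) {
    // Standard bivariate Gaussian: zero mean, identity covariance.
    MultivariateGaussian gaussian = new MultivariateGaussian(
        Vectors.dense(0.0, 0.0),
        Matrices.dense(2, 2, new double[]{1.0, 0.0, 0.0, 1.0}));
    // Log-density at the point (0.5, -0.5).
    double logDensity = gaussian.logpdf(Vectors.dense(0.5, -0.5));
    System.out.println("logpdf: " + logDensity);
  }
}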
logPerplexity(Dataset<?>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logPerplexity(Dataset<?>) - Method in class org.apache.spark.ml.clustering.LDAModel
Calculate an upper bound on perplexity.
logPerplexity(Dataset<?>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logPerplexity(RDD<Tuple2<Object, Vector>>) - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
Calculate an upper bound on perplexity.
logPerplexity(JavaPairRDD<Long, Vector>) - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
Java-friendly version of logPerplexity
logPrior() - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
Log probability of the current parameter estimate: log P(topics, topic distributions for docs | Dirichlet hyperparameters)
logPrior() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
Log probability of the current parameter estimate: log P(topics, topic distributions for docs | alpha, eta)
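A minimal, hypothetical sketch tying the two diagnostics above together (the class name, input path, column layout, and parameter values are assumptions; the bundled sample_lda_libsvm_data.txt file is used only for illustration):

import org.apache.spark.ml.clustering.DistributedLDAModel;
import org.apache.spark.ml.clustering.LDA;
import org.apache.spark.ml.clustering.LDAModel;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class LdaDiagnosticsSketch {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("ldaDiagnostics").master("local[2]").getOrCreate();
    // Expects a "features" column of term-count vectors.
    Dataset<Row> dataset = spark.read().format("libsvm")
        .load("data/mllib/sample_lda_libsvm_data.txt");
    // The "em" optimizer produces a DistributedLDAModel, which exposes logPrior().
    LDAModel model = new LDA().setK(10).setMaxIter(20).setOptimizer("em").fit(dataset);
    System.out.println("upper bound on perplexity: " + model.logPerplexity(dataset));
    if (model instanceof DistributedLDAModel) {
      System.out.println("log prior: " + ((DistributedLDAModel) model).logPrior());
    }
    spark.stop();
  }
}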
logStartFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
logStartToJson(org.apache.spark.scheduler.SparkListenerLogStart) - Static method in class org.apache.spark.util.JsonProtocol
 
logTrace(Function0<String>) - Static method in class org.apache.spark.api.r.RRDD
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.api.r.RRDD
 
logTrace(Function0<String>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logTrace(Function0<String>) - Static method in class org.apache.spark.graphx.GraphLoader
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.GraphLoader
 
logTrace(Function0<String>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logTrace(Function0<String>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logTrace(Function0<String>) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logTrace(Function0<String>) - Static method in class org.apache.spark.graphx.Pregel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.Pregel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logTrace(Function0<String>) - Static method in class org.apache.spark.graphx.VertexRDD
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.VertexRDD
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LDA
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LDA
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.DCT
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.DCT
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDF
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDF
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.Interaction
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Interaction
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.NGram
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.NGram
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCA
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCA
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormula
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormula
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.Pipeline
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.Pipeline
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.PipelineModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.PipelineModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logTrace(Function0<String>) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logTrace(Function0<String>) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logTrace(Function0<String>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logTrace(Function0<String>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logTrace(Function0<String>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logTrace(Function0<String>) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logTrace(Function0<String>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logTrace(Function0<String>) - Static method in class org.apache.spark.rdd.RDD
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.RDD
 
logTrace(Function0<String>) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logTrace(Function0<String>) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logTrace(Function0<String>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logTrace(Function0<String>) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logTrace(Function0<String>) - Static method in class org.apache.spark.SparkConf
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkConf
 
logTrace(Function0<String>) - Static method in class org.apache.spark.SparkContext
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkContext
 
logTrace(Function0<String>) - Static method in class org.apache.spark.SparkEnv
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkEnv
 
logTrace(Function0<String>) - Static method in class org.apache.spark.sql.Column
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.Column
 
logTrace(Function0<String>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logTrace(Function0<String>) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logTrace(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logTrace(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logTrace(Function0<String>) - Static method in class org.apache.spark.sql.SparkSession
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SparkSession
 
logTrace(Function0<String>) - Static method in class org.apache.spark.sql.SQLContext
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SQLContext
 
logTrace(Function0<String>) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logTrace(Function0<String>) - Static method in class org.apache.spark.storage.StorageUtils
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.storage.StorageUtils
 
logTrace(Function0<String>) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logTrace(Function0<String>) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logTrace(Function0<String>) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logTrace(Function0<String>) - Static method in class org.apache.spark.streaming.StreamingContext
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.StreamingContext
 
logTrace(Function0<String>) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logTrace(Function0<String>) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ui.JettyUtils
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.JettyUtils
 
logTrace(Function0<String>) - Static method in class org.apache.spark.ui.UIUtils
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.UIUtils
 
logTrace(Function0<String>) - Static method in class org.apache.spark.util.ClosureCleaner
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ClosureCleaner
 
logTrace(Function0<String>) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logTrace(Function0<String>) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logTrace(Function0<String>) - Static method in class org.apache.spark.util.SignalUtils
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SignalUtils
 
logTrace(Function0<String>) - Static method in class org.apache.spark.util.SizeEstimator
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SizeEstimator
 
logTrace(Function0<String>) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logTrace(Function0<String>) - Static method in class org.apache.spark.util.Utils
 
logTrace(Function0<String>, Throwable) - Static method in class org.apache.spark.util.Utils
 
logUncaughtExceptions(Function0<T>) - Static method in class org.apache.spark.util.Utils
Execute the given block, logging and re-throwing any uncaught exception.
logUrlMap() - Method in class org.apache.spark.scheduler.cluster.ExecutorInfo
 
logUrls() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutor
 
logWarning(Function0<String>) - Static method in class org.apache.spark.api.r.RRDD
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.api.r.RRDD
 
logWarning(Function0<String>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.EdgeRDD
 
logWarning(Function0<String>) - Static method in class org.apache.spark.graphx.GraphLoader
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.GraphLoader
 
logWarning(Function0<String>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
logWarning(Function0<String>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
logWarning(Function0<String>) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.lib.PageRank
 
logWarning(Function0<String>) - Static method in class org.apache.spark.graphx.Pregel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.Pregel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
logWarning(Function0<String>) - Static method in class org.apache.spark.graphx.VertexRDD
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.graphx.VertexRDD
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeans
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LDA
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LDA
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Binarizer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.DCT
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.DCT
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.HashingTF
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDF
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDF
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IDFModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.IndexToString
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.Interaction
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Interaction
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.NGram
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.NGram
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Normalizer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCA
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCA
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PCAModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormula
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormula
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.Pipeline
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.Pipeline
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.PipelineModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.PipelineModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALS
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeans
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.KMeans
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LDA
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.LocalKMeans
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.AssociationRules
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.FPGrowth
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.fpm.PrefixSpan
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.BLAS
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.optimization.LBFGS
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.ALS
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.DecisionTree
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.model.Node
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.tree.RandomForest
 
logWarning(Function0<String>) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.mllib.util.DataValidators
 
logWarning(Function0<String>) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.AsyncRDDActions
 
logWarning(Function0<String>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.HadoopRDD
 
logWarning(Function0<String>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.JdbcRDD
 
logWarning(Function0<String>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
logWarning(Function0<String>) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
logWarning(Function0<String>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
logWarning(Function0<String>) - Static method in class org.apache.spark.rdd.RDD
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.rdd.RDD
 
logWarning(Function0<String>) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
logWarning(Function0<String>) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.InputFormatInfo
 
logWarning(Function0<String>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
logWarning(Function0<String>) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.KryoSerializer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.serializer.SerializationDebugger
 
logWarning(Function0<String>) - Static method in class org.apache.spark.SparkConf
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkConf
 
logWarning(Function0<String>) - Static method in class org.apache.spark.SparkContext
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkContext
 
logWarning(Function0<String>) - Static method in class org.apache.spark.SparkEnv
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.SparkEnv
 
logWarning(Function0<String>) - Static method in class org.apache.spark.sql.Column
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.Column
 
logWarning(Function0<String>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
logWarning(Function0<String>) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
logWarning(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
logWarning(Function0<String>) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.hive.orc.OrcFilters
 
logWarning(Function0<String>) - Static method in class org.apache.spark.sql.SparkSession
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SparkSession
 
logWarning(Function0<String>) - Static method in class org.apache.spark.sql.SQLContext
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.SQLContext
 
logWarning(Function0<String>) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.sql.types.UDTRegistration
 
logWarning(Function0<String>) - Static method in class org.apache.spark.storage.StorageUtils
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.storage.StorageUtils
 
logWarning(Function0<String>) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.CheckpointReader
 
logWarning(Function0<String>) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.dstream.DStream
 
logWarning(Function0<String>) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
logWarning(Function0<String>) - Static method in class org.apache.spark.streaming.StreamingContext
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.StreamingContext
 
logWarning(Function0<String>) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
logWarning(Function0<String>) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ui.JettyUtils
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.JettyUtils
 
logWarning(Function0<String>) - Static method in class org.apache.spark.ui.UIUtils
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.ui.UIUtils
 
logWarning(Function0<String>) - Static method in class org.apache.spark.util.ClosureCleaner
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ClosureCleaner
 
logWarning(Function0<String>) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.util.random.StratifiedSamplingUtils
 
logWarning(Function0<String>) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.util.ShutdownHookManager
 
logWarning(Function0<String>) - Static method in class org.apache.spark.util.SignalUtils
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SignalUtils
 
logWarning(Function0<String>) - Static method in class org.apache.spark.util.SizeEstimator
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SizeEstimator
 
logWarning(Function0<String>) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
logWarning(Function0<String>) - Static method in class org.apache.spark.util.Utils
 
logWarning(Function0<String>, Throwable) - Static method in class org.apache.spark.util.Utils
 
LONG() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable long type.
longAccumulator() - Method in class org.apache.spark.SparkContext
Create and register a long accumulator, which starts with 0 and accumulates inputs by +=.
longAccumulator(String) - Method in class org.apache.spark.SparkContext
Create and register a long accumulator, which starts with 0 and accumulates inputs by +=.
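
A minimal sketch of registering and using a long accumulator; the application name, master URL, accumulator name, and data are illustrative, not part of the index entry above.

    import org.apache.spark.{SparkConf, SparkContext}

    object AccumulatorExample {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("acc-example").setMaster("local[*]"))
        // longAccumulator registers an accumulator that starts at 0 and accumulates with add.
        val errorCount = sc.longAccumulator("errorCount")
        sc.parallelize(Seq(1, -2, 3, -4)).foreach { x =>
          if (x < 0) errorCount.add(1)   // executor-side updates
        }
        println(errorCount.value)        // driver-side read: 2
        sc.stop()
      }
    }
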
LongAccumulator - Class in org.apache.spark.util
An accumulator for computing sum, count, and averages for 64-bit integers.
LongAccumulator() - Constructor for class org.apache.spark.util.LongAccumulator
 
LongParam - Class in org.apache.spark.ml.param
:: DeveloperApi :: Specialized version of Param[Long] for Java.
LongParam(String, String, String, Function1<Object, Object>) - Constructor for class org.apache.spark.ml.param.LongParam
 
LongParam(String, String, String) - Constructor for class org.apache.spark.ml.param.LongParam
 
LongParam(Identifiable, String, String, Function1<Object, Object>) - Constructor for class org.apache.spark.ml.param.LongParam
 
LongParam(Identifiable, String, String) - Constructor for class org.apache.spark.ml.param.LongParam
 
LongType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the LongType object.
LongType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing Long values.
lookup(K) - Method in class org.apache.spark.api.java.JavaPairRDD
Return the list of values in the RDD for the given key.
lookup(K) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return the list of values in the RDD for the given key.
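
A minimal sketch of lookup on a pair RDD, assuming an existing SparkContext `sc`; the sample data is illustrative.

    val pairs = sc.parallelize(Seq(("a", 1), ("a", 2), ("b", 3)))
    // lookup returns all values associated with the given key.
    val valuesForA: Seq[Int] = pairs.lookup("a")   // Seq(1, 2)
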
lookupRpcTimeout(SparkConf) - Static method in class org.apache.spark.util.RpcUtils
Returns the default Spark timeout to use for RPC remote endpoint lookup.
loss() - Method in class org.apache.spark.ml.classification.LogisticAggregator
 
loss() - Method in class org.apache.spark.ml.regression.AFTAggregator
 
loss() - Method in class org.apache.spark.ml.regression.LeastSquaresAggregator
 
loss() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
Loss - Interface in org.apache.spark.mllib.tree.loss
:: DeveloperApi :: Trait for adding "pluggable" loss functions for the gradient boosting algorithm.
Losses - Class in org.apache.spark.mllib.tree.loss
 
Losses() - Constructor for class org.apache.spark.mllib.tree.loss.Losses
 
LossReasonPending - Class in org.apache.spark.scheduler
A loss reason that means we don't yet know why the executor exited.
LossReasonPending() - Constructor for class org.apache.spark.scheduler.LossReasonPending
 
lossType() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
lossType() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
lossType() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
lossType() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
LOST() - Static method in class org.apache.spark.TaskState
 
low() - Method in class org.apache.spark.partial.BoundedDouble
 
lower(Column) - Static method in class org.apache.spark.sql.functions
Converts a string column to lower case.
lpad(Column, int, String) - Static method in class org.apache.spark.sql.functions
Left-pad the string column with the given pad string, up to the specified length.
lt(double) - Static method in class org.apache.spark.ml.param.ParamValidators
Check if value < upperBound
lt(Object) - Method in class org.apache.spark.sql.Column
Less than.
ltEq(double) - Static method in class org.apache.spark.ml.param.ParamValidators
Check if value <= upperBound
ltrim(Column) - Static method in class org.apache.spark.sql.functions
Trim the spaces from the left end of the specified string value.
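
An illustrative use of the lower, lpad, and ltrim column functions, assuming an existing SparkSession `spark`; the column name and data are made up for the sketch.

    import org.apache.spark.sql.functions.{lower, lpad, ltrim}
    import spark.implicits._

    val df = Seq("  Spark  ").toDF("s")
    df.select(lower($"s"), lpad($"s", 12, "*"), ltrim($"s")).show()
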
LZ4BlockInputStream - Class in org.apache.spark.io
InputStream implementation to decode data written with LZ4BlockOutputStream.
LZ4BlockInputStream(InputStream, LZ4FastDecompressor, Checksum) - Constructor for class org.apache.spark.io.LZ4BlockInputStream
Create a new InputStream.
LZ4BlockInputStream(InputStream, LZ4FastDecompressor) - Constructor for class org.apache.spark.io.LZ4BlockInputStream
Create a new instance using XXHash32 for checksumming.
LZ4BlockInputStream(InputStream) - Constructor for class org.apache.spark.io.LZ4BlockInputStream
Create a new instance which uses the fastest LZ4FastDecompressor available.
LZ4CompressionCodec - Class in org.apache.spark.io
:: DeveloperApi :: LZ4 implementation of CompressionCodec.
LZ4CompressionCodec(SparkConf) - Constructor for class org.apache.spark.io.LZ4CompressionCodec
 
LZFCompressionCodec - Class in org.apache.spark.io
:: DeveloperApi :: LZF implementation of CompressionCodec.
LZFCompressionCodec(SparkConf) - Constructor for class org.apache.spark.io.LZFCompressionCodec
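
These codecs are normally selected through configuration rather than instantiated directly; a minimal sketch of choosing one via SparkConf (the application name is illustrative).

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
      .setAppName("codec-example")
      .set("spark.io.compression.codec", "lz4")   // or "lzf", "snappy"
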
 

M

main(String[]) - Static method in class org.apache.spark.ml.param.shared.SharedParamsCodeGen
 
main(String[]) - Static method in class org.apache.spark.mllib.util.KMeansDataGenerator
 
main(String[]) - Static method in class org.apache.spark.mllib.util.LinearDataGenerator
 
main(String[]) - Static method in class org.apache.spark.mllib.util.LogisticRegressionDataGenerator
 
main(String[]) - Static method in class org.apache.spark.mllib.util.MFDataGenerator
 
main(String[]) - Static method in class org.apache.spark.mllib.util.SVMDataGenerator
 
main(String[]) - Static method in class org.apache.spark.streaming.util.RawTextSender
 
main(String[]) - Static method in class org.apache.spark.ui.UIWorkloadGenerator
 
makeBinarySearch(Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.util.CollectionsUtils
 
makeCopy(Object[]) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
makeDataset(Seq<T>, SparkSession, TypeTags.TypeTag<T>) - Static method in class org.apache.spark.sql.internal.CatalogImpl
 
makeDescription(String, String, boolean) - Static method in class org.apache.spark.ui.UIUtils
Returns HTML rendering of a job or stage description.
makeDriverRef(String, SparkConf, org.apache.spark.rpc.RpcEnv) - Static method in class org.apache.spark.util.RpcUtils
Retrieve an RpcEndpointRef located in the driver via its name.
makeProgressBar(int, int, int, int, int) - Static method in class org.apache.spark.ui.UIUtils
 
makeRDD(Seq<T>, int, ClassTag<T>) - Method in class org.apache.spark.SparkContext
Distribute a local Scala collection to form an RDD.
makeRDD(Seq<Tuple2<T, Seq<String>>>, ClassTag<T>) - Method in class org.apache.spark.SparkContext
Distribute a local Scala collection to form an RDD, with one or more location preferences (hostnames of Spark nodes) for each object.
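
A minimal sketch of both makeRDD overloads, assuming an existing SparkContext `sc`; the hostnames are illustrative placeholders for preferred locations.

    val rdd = sc.makeRDD(Seq(1, 2, 3, 4), 2)   // 2 partitions
    val withPrefs = sc.makeRDD(Seq(
      (10, Seq("host1")),                      // element with a preferred location
      (20, Seq("host2"))
    ))
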
map(Function<T, R>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
map(Function<T, R>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
map(Function<T, R>) - Static method in class org.apache.spark.api.java.JavaRDD
 
map(Function<T, R>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to all elements of this RDD.
map(Function1<T, U>, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
map(Function1<T, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
map(Function1<T, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
map(Function1<T, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
map(Function1<T, U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
map(Function1<Object, Object>) - Method in interface org.apache.spark.ml.linalg.Matrix
Map the values of this matrix using a function.
map(Function1<Object, Object>) - Method in interface org.apache.spark.mllib.linalg.Matrix
Map the values of this matrix using a function.
map(Function1<R, T>) - Method in class org.apache.spark.partial.PartialResult
Transform this PartialResult into a PartialResult of type T.
map(Function1<T, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
map(Function1<T, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
map(Function1<T, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
map(Function1<T, U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
map(Function1<T, U>, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Return a new RDD by applying a function to all elements of this RDD.
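
A minimal sketch of RDD.map, assuming an existing SparkContext `sc`.

    val doubled = sc.parallelize(1 to 5).map(_ * 2)
    doubled.collect()   // Array(2, 4, 6, 8, 10)
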
map(DataType, DataType) - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type map.
map(MapType) - Method in class org.apache.spark.sql.ColumnName
 
map(Function1<T, U>, Encoder<U>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Scala-specific) Returns a new Dataset that contains the result of applying func to each element.
map(MapFunction<T, U>, Encoder<U>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Java-specific) Returns a new Dataset that contains the result of applying func to each element.
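
A minimal sketch of the Scala-specific Dataset.map, assuming an existing SparkSession `spark`; the sample strings are illustrative.

    import spark.implicits._          // brings Encoders for common types into scope

    val ds = Seq("alpha", "beta").toDS()
    val upper = ds.map(_.toUpperCase) // Dataset[String]
    upper.show()
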
map(Column...) - Static method in class org.apache.spark.sql.functions
Creates a new map column.
map(Seq<Column>) - Static method in class org.apache.spark.sql.functions
Creates a new map column.
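
A minimal sketch of building a map column from key/value columns with the functions.map helper, assuming an existing SparkSession `spark`; column names and data are illustrative.

    import org.apache.spark.sql.functions.map
    import spark.implicits._

    val df = Seq(("a", 1), ("b", 2)).toDF("k", "v")
    df.select(map($"k", $"v").as("kv")).show()   // each row holds e.g. Map(a -> 1)
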
map(Function1<BaseType, A>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
map(Function1<A, B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
map(Function<T, R>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
map(Function<T, R>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream by applying a function to all elements of this DStream.
map(Function<T, R>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
map(Function<T, R>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
map(Function<T, R>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
map(Function<T, R>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
map(Function<T, R>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
map(Function1<T, U>, ClassTag<U>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream by applying a function to all elements of this DStream.
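
A minimal sketch of DStream.map in a streaming job, assuming an existing SparkContext `sc`; the socket source, host, and port are illustrative.

    import org.apache.spark.streaming.{Seconds, StreamingContext}

    val ssc = new StreamingContext(sc, Seconds(5))
    val lines = ssc.socketTextStream("localhost", 9999)
    val lengths = lines.map(_.length)   // new DStream with one Int per input line
    lengths.print()
    ssc.start()
    ssc.awaitTermination()
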
mapAsSerializableJavaMap(Map<A, B>) - Static method in class org.apache.spark.api.java.JavaUtils
 
mapChildren(Function1<BaseType, BaseType>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
mapEdgePartitions(Function2<Object, EdgePartition<ED, VD>, EdgePartition<ED2, VD2>>, ClassTag<ED2>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
mapEdges(Function1<Edge<ED>, ED2>, ClassTag<ED2>) - Method in class org.apache.spark.graphx.Graph
Transforms each edge attribute in the graph using the map function.
mapEdges(Function2<Object, Iterator<Edge<ED>>, Iterator<ED2>>, ClassTag<ED2>) - Method in class org.apache.spark.graphx.Graph
Transforms each edge attribute using the map function, passing it a whole partition at a time.
mapEdges(Function2<Object, Iterator<Edge<ED>>, Iterator<ED2>>, ClassTag<ED2>) - Method in class org.apache.spark.graphx.impl.GraphImpl
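
A minimal sketch of Graph.mapEdges, assuming an existing SparkContext `sc`; the vertex and edge data are illustrative.

    import org.apache.spark.graphx.{Edge, Graph}

    val vertices = sc.parallelize(Seq((1L, "a"), (2L, "b")))
    val edges = sc.parallelize(Seq(Edge(1L, 2L, 3)))
    val graph = Graph(vertices, edges)
    val scaled = graph.mapEdges(e => e.attr * 10)   // edge attribute 3 becomes 30
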
 
mapFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
Util JSON deserialization methods.
MapFunction<T,U> - Interface in org.apache.spark.api.java.function
Base interface for a map function used in Dataset's map function.
mapGroups(Function2<K, Iterator<V>, U>, Encoder<U>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Applies the given function to each group of data.
mapGroups(MapGroupsFunction<K, V, U>, Encoder<U>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Applies the given function to each group of data.
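
A minimal sketch of groupByKey followed by mapGroups, assuming an existing SparkSession `spark`; the department/value tuples are illustrative.

    import spark.implicits._

    val ds = Seq(("sales", 10), ("sales", 20), ("eng", 5)).toDS()
    val totals = ds.groupByKey(_._1)                                   // group by department
      .mapGroups((dept, rows) => (dept, rows.map(_._2).sum))           // one output row per group
    totals.show()
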
MapGroupsFunction<K,V,R> - Interface in org.apache.spark.api.java.function
Base interface for a map function used in KeyValueGroupedDataset's mapGroups function.
mapId() - Method in class org.apache.spark.FetchFailed
 
mapId() - Method in class org.apache.spark.storage.ShuffleBlockId
 
mapId() - Method in class org.apache.spark.storage.ShuffleDataBlockId
 
mapId() - Method in class org.apache.spark.storage.ShuffleIndexBlockId
 
mapOutputTracker() - Method in class org.apache.spark.SparkEnv
 
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapPartitions(FlatMapFunction<Iterator<T>, U>, boolean) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapPartitions(FlatMapFunction<Iterator<T>, U>, boolean) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Static method in class org.apache.spark.api.java.JavaRDD
 
mapPartitions(FlatMapFunction<Iterator<T>, U>, boolean) - Static method in class org.apache.spark.api.java.JavaRDD
 
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to each partition of this RDD.
mapPartitions(FlatMapFunction<Iterator<T>, U>, boolean) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to each partition of this RDD.
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Return a new RDD by applying a function to each partition of this RDD.
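
A minimal sketch of RDD.mapPartitions, assuming an existing SparkContext `sc`; the data and partition count are illustrative.

    val rdd = sc.parallelize(1 to 100, 4)
    val sums = rdd.mapPartitions { iter =>
      // Per-partition work happens once here instead of once per element.
      Iterator.single(iter.sum)
    }
    sums.collect()   // one partial sum per partition
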
mapPartitions(Function1<Iterator<T>, Iterator<U>>, Encoder<U>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Scala-specific) Returns a new Dataset that contains the result of applying func to each partition.
mapPartitions(MapPartitionsFunction<T, U>, Encoder<U>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Java-specific) Returns a new Dataset that contains the result of applying f to each partition.
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying mapPartitions() to each RDD of this DStream.
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
mapPartitions(FlatMapFunction<Iterator<T>, U>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
mapPartitions(Function1<Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD is generated by applying mapPartitions() to each RDD of this DStream.
mapPartitions$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
mapPartitions$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
mapPartitions$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
mapPartitions$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
mapPartitions$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
mapPartitions$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
mapPartitions$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
mapPartitions$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
mapPartitions$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
MapPartitionsFunction<T,U> - Interface in org.apache.spark.api.java.function
Base interface for a function used in Dataset's mapPartitions.
mapPartitionsInternal$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
mapPartitionsInternal$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
mapPartitionsInternal$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
mapPartitionsInternal$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
mapPartitionsInternal$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
mapPartitionsInternal$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
mapPartitionsInternal$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
mapPartitionsInternal$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
mapPartitionsInternal$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
mapPartitionsToDouble(DoubleFlatMapFunction<Iterator<T>>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapPartitionsToDouble(DoubleFlatMapFunction<Iterator<T>>, boolean) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapPartitionsToDouble(DoubleFlatMapFunction<Iterator<T>>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapPartitionsToDouble(DoubleFlatMapFunction<Iterator<T>>, boolean) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapPartitionsToDouble(DoubleFlatMapFunction<Iterator<T>>) - Static method in class org.apache.spark.api.java.JavaRDD
 
mapPartitionsToDouble(DoubleFlatMapFunction<Iterator<T>>, boolean) - Static method in class org.apache.spark.api.java.JavaRDD
 
mapPartitionsToDouble(DoubleFlatMapFunction<Iterator<T>>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to each partition of this RDD.
mapPartitionsToDouble(DoubleFlatMapFunction<Iterator<T>>, boolean) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to each partition of this RDD.
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>, boolean) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>, boolean) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Static method in class org.apache.spark.api.java.JavaRDD
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>, boolean) - Static method in class org.apache.spark.api.java.JavaRDD
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to each partition of this RDD.
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>, boolean) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to each partition of this RDD.
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying mapPartitions() to each RDD of this DStream.
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
mapPartitionsToPair(PairFlatMapFunction<Iterator<T>, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
mapPartitionsWithIndex(Function2<Integer, Iterator<T>, Iterator<R>>, boolean) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapPartitionsWithIndex(Function2<Integer, Iterator<T>, Iterator<R>>, boolean) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapPartitionsWithIndex(Function2<Integer, Iterator<T>, Iterator<R>>, boolean) - Static method in class org.apache.spark.api.java.JavaRDD
 
mapPartitionsWithIndex(Function2<Integer, Iterator<T>, Iterator<R>>, boolean) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to each partition of this RDD, while tracking the index of the original partition.
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
mapPartitionsWithIndex(Function2<Object, Iterator<T>, Iterator<U>>, boolean, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Return a new RDD by applying a function to each partition of this RDD, while tracking the index of the original partition.
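
A minimal sketch of RDD.mapPartitionsWithIndex, assuming an existing SparkContext `sc`; the data is illustrative.

    val rdd = sc.parallelize(1 to 6, 3)
    val tagged = rdd.mapPartitionsWithIndex { (partitionIndex, iter) =>
      iter.map(x => (partitionIndex, x))   // record which partition each element came from
    }
    tagged.collect()   // e.g. Array((0,1), (0,2), (1,3), (1,4), (2,5), (2,6))
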
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.api.java.JavaRDD
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
mapPartitionsWithIndex$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
mapPartitionsWithInputSplit(Function2<InputSplit, Iterator<Tuple2<K, V>>, Iterator<R>>, boolean) - Method in class org.apache.spark.api.java.JavaHadoopRDD
Maps over a partition, providing the InputSplit that was used as the base of the partition.
mapPartitionsWithInputSplit(Function2<InputSplit, Iterator<Tuple2<K, V>>, Iterator<R>>, boolean) - Method in class org.apache.spark.api.java.JavaNewHadoopRDD
Maps over a partition, providing the InputSplit that was used as the base of the partition.
mapPartitionsWithInputSplit(Function2<InputSplit, Iterator<Tuple2<K, V>>, Iterator<U>>, boolean, ClassTag<U>) - Method in class org.apache.spark.rdd.HadoopRDD
Maps over a partition, providing the InputSplit that was used as the base of the partition.
mapPartitionsWithInputSplit(Function2<InputSplit, Iterator<Tuple2<K, V>>, Iterator<U>>, boolean, ClassTag<U>) - Method in class org.apache.spark.rdd.NewHadoopRDD
Maps over a partition, providing the InputSplit that was used as the base of the partition.
mapredInputFormat() - Method in class org.apache.spark.scheduler.InputFormatInfo
 
mapreduceInputFormat() - Method in class org.apache.spark.scheduler.InputFormatInfo
 
mapSideCombine() - Method in class org.apache.spark.ShuffleDependency
 
mapToDouble(DoubleFunction<T>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapToDouble(DoubleFunction<T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapToDouble(DoubleFunction<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
mapToDouble(DoubleFunction<T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to all elements of this RDD.
mapToJson(Map<String, String>) - Static method in class org.apache.spark.util.JsonProtocol
Util JSON serialization methods.
mapToPair(PairFunction<T, K2, V2>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
mapToPair(PairFunction<T, K2, V2>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
mapToPair(PairFunction<T, K2, V2>) - Static method in class org.apache.spark.api.java.JavaRDD
 
mapToPair(PairFunction<T, K2, V2>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return a new RDD by applying a function to all elements of this RDD.
mapToPair(PairFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
mapToPair(PairFunction<T, K2, V2>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream by applying a function to all elements of this DStream.
mapToPair(PairFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
mapToPair(PairFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
mapToPair(PairFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
mapToPair(PairFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
mapToPair(PairFunction<T, K2, V2>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
mapTriplets(Function1<EdgeTriplet<VD, ED>, ED2>, ClassTag<ED2>) - Method in class org.apache.spark.graphx.Graph
Transforms each edge attribute using the map function, passing it the adjacent vertex attributes as well.
mapTriplets(Function1<EdgeTriplet<VD, ED>, ED2>, TripletFields, ClassTag<ED2>) - Method in class org.apache.spark.graphx.Graph
Transforms each edge attribute using the map function, passing it the adjacent vertex attributes as well.
mapTriplets(Function2<Object, Iterator<EdgeTriplet<VD, ED>>, Iterator<ED2>>, TripletFields, ClassTag<ED2>) - Method in class org.apache.spark.graphx.Graph
Transforms each edge attribute a partition at a time using the map function, passing it the adjacent vertex attributes as well.
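A small GraphX sketch of the simplest overload (one function applied to each EdgeTriplet); the toy vertex weights and edge labels are made up for illustration, and sc is the spark-shell SparkContext:

    import org.apache.spark.graphx.{Edge, Graph}

    // Toy graph: vertex attribute = a weight, edge attribute = a label.
    val vertices = sc.parallelize(Seq((1L, 2.0), (2L, 3.0), (3L, 5.0)))
    val edges = sc.parallelize(Seq(Edge(1L, 2L, "a"), Edge(2L, 3L, "b")))
    val graph = Graph(vertices, edges)

    // Replace each edge attribute with the sum of the adjacent vertex attributes.
    val summed = graph.mapTriplets(triplet => triplet.srcAttr + triplet.dstAttr)
    summed.edges.collect().foreach(println)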
mapTriplets(Function2<Object, Iterator<EdgeTriplet<VD, ED>>, Iterator<ED2>>, TripletFields, ClassTag<ED2>) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
MapType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type for Maps.
MapType(DataType, DataType, boolean) - Constructor for class org.apache.spark.sql.types.MapType
 
MapType() - Constructor for class org.apache.spark.sql.types.MapType
No-arg constructor for kryo.
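For example, a MapType column can be declared in a DataFrame schema as follows (the field name is chosen arbitrarily):

    import org.apache.spark.sql.types.{IntegerType, MapType, StringType, StructField, StructType}

    // One map column whose keys are strings and whose values are nullable ints.
    val schema = StructType(Seq(
      StructField("wordCounts", MapType(StringType, IntegerType, valueContainsNull = true))
    ))
    println(schema.simpleString)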
mapValues(Function<V, U>) - Method in class org.apache.spark.api.java.JavaPairRDD
Pass each value in the key-value pair RDD through a map function without changing the keys; this also retains the original RDD's partitioning.
mapValues(Function1<Edge<ED>, ED2>, ClassTag<ED2>) - Method in class org.apache.spark.graphx.EdgeRDD
Map the values in each edge partition, preserving the structure but changing the values.
mapValues(Function1<Edge<ED>, ED2>, ClassTag<ED2>) - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
mapValues(Function1<VD, VD2>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
mapValues(Function2<Object, VD, VD2>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
mapValues(Function1<VD, VD2>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.VertexRDD
Maps each vertex attribute, preserving the index.
mapValues(Function2<Object, VD, VD2>, ClassTag<VD2>) - Method in class org.apache.spark.graphx.VertexRDD
Maps each vertex attribute, additionally supplying the vertex ID.
mapValues(Function1<V, U>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Pass each value in the key-value pair RDD through a map function without changing the keys; this also retains the original RDD's partitioning.
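A quick sketch of the Scala pair-RDD variant, runnable in spark-shell (sc predefined):

    val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3)))
    // Double every value; keys and partitioning are left untouched.
    val doubled = pairs.mapValues(_ * 2)
    doubled.collect().foreach(println)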
mapValues(Function<V, U>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying a map function to the value of each key-value pair in 'this' DStream without changing the key.
mapValues(Function<V, U>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
mapValues(Function<V, U>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
mapValues(Function1<V, U>, ClassTag<U>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying a map function to the value of each key-value pair in 'this' DStream without changing the key.
mapVertices(Function2<Object, VD, VD2>, ClassTag<VD2>, Predef.$eq$colon$eq<VD, VD2>) - Method in class org.apache.spark.graphx.Graph
Transforms each vertex attribute in the graph using the map function.
mapVertices(Function2<Object, VD, VD2>, ClassTag<VD2>, Predef.$eq$colon$eq<VD, VD2>) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
mapVertices$default$3(Function2<Object, VD, VD2>) - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
mapWithState(StateSpec<K, V, StateType, MappedType>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
:: Experimental :: Return a JavaMapWithStateDStream by applying a function to every key-value element of this stream, while maintaining some state data for each unique key.
mapWithState(StateSpec<K, V, StateType, MappedType>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
mapWithState(StateSpec<K, V, StateType, MappedType>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
mapWithState(StateSpec<K, V, StateType, MappedType>, ClassTag<StateType>, ClassTag<MappedType>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
:: Experimental :: Return a MapWithStateDStream by applying a function to every key-value element of this stream, while maintaining some state data for each unique key.
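A hedged sketch of the Scala variant keeping a running count per key; the input DStream events is hypothetical, and mapWithState additionally requires ssc.checkpoint(...) to be set on the StreamingContext:

    import org.apache.spark.streaming.{State, StateSpec}

    // Fold each new value into the running count stored in State.
    def updateCount(key: String, value: Option[Int], state: State[Int]): (String, Int) = {
      val newCount = state.getOption.getOrElse(0) + value.getOrElse(0)
      state.update(newCount)
      (key, newCount)
    }

    // events: DStream[(String, Int)] from some source -- assumed to exist.
    val counts = events.mapWithState(StateSpec.function(updateCount _))
    counts.print()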
MapWithStateDStream<KeyType,ValueType,StateType,MappedType> - Class in org.apache.spark.streaming.dstream
:: Experimental :: DStream representing the stream of data generated by mapWithState operation on a pair DStream.
MapWithStateDStream(StreamingContext, ClassTag<MappedType>) - Constructor for class org.apache.spark.streaming.dstream.MapWithStateDStream
 
mark(int) - Method in class org.apache.spark.io.LZ4BlockInputStream
 
mark(int) - Method in class org.apache.spark.storage.BufferReleasingInputStream
 
markSupported() - Method in class org.apache.spark.io.LZ4BlockInputStream
 
markSupported() - Method in class org.apache.spark.storage.BufferReleasingInputStream
 
mask(Graph<VD2, ED2>, ClassTag<VD2>, ClassTag<ED2>) - Method in class org.apache.spark.graphx.Graph
Restricts the graph to only the vertices and edges that are also in other, but keeps the attributes from this graph.
mask(Graph<VD2, ED2>, ClassTag<VD2>, ClassTag<ED2>) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
master() - Method in class org.apache.spark.api.java.JavaSparkContext
 
master() - Method in class org.apache.spark.SparkContext
 
master(String) - Method in class org.apache.spark.sql.SparkSession.Builder
Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster.
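For example, a local session can be built like this (the application name is arbitrary):

    import org.apache.spark.sql.SparkSession

    // Run locally with 4 worker threads; use "spark://host:7077" for a standalone cluster instead.
    val spark = SparkSession.builder()
      .master("local[4]")
      .appName("master-example")
      .getOrCreate()
    println(spark.sparkContext.master)
    spark.stop()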
Matrices - Class in org.apache.spark.ml.linalg
Factory methods for Matrix.
Matrices() - Constructor for class org.apache.spark.ml.linalg.Matrices
 
Matrices - Class in org.apache.spark.mllib.linalg
Factory methods for Matrix.
Matrices() - Constructor for class org.apache.spark.mllib.linalg.Matrices
 
Matrix - Interface in org.apache.spark.ml.linalg
Trait for a local matrix.
Matrix - Interface in org.apache.spark.mllib.linalg
Trait for a local matrix.
MatrixEntry - Class in org.apache.spark.mllib.linalg.distributed
Represents an entry in a distributed matrix.
MatrixEntry(long, long, double) - Constructor for class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
MatrixFactorizationModel - Class in org.apache.spark.mllib.recommendation
Model representing the result of matrix factorization.
MatrixFactorizationModel(int, RDD<Tuple2<Object, double[]>>, RDD<Tuple2<Object, double[]>>) - Constructor for class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
MatrixFactorizationModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.recommendation
 
MatrixFactorizationModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.recommendation.MatrixFactorizationModel.SaveLoadV1_0$
 
MatrixImplicits - Class in org.apache.spark.mllib.linalg
Implicit methods available in Scala for converting org.apache.spark.mllib.linalg.Matrix to org.apache.spark.ml.linalg.Matrix and vice versa.
MatrixImplicits() - Constructor for class org.apache.spark.mllib.linalg.MatrixImplicits
 
max() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Returns the maximum element from this RDD as defined by the default comparator (natural order).
max(Comparator<T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
max(Comparator<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
max(Comparator<T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Returns the maximum element from this RDD as defined by the specified Comparator[T].
max(Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
max(Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
max(Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
max(Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
max(Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
MAX() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
max() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
max() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
max() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
max() - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
Maximum value of each dimension.
max() - Method in interface org.apache.spark.mllib.stat.MultivariateStatisticalSummary
Maximum value of each column.
max(Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
max(Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
max(Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
max(Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
max(Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Returns the max of this RDD as defined by the implicit Ordering[T].
max(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the maximum value of the expression in a group.
max(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the maximum value of the column in a group.
max(String...) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the max value for each numeric column for each group.
max(Seq<String>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the max value for each numeric column for each group.
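A small sketch combining the column function and the grouped shortcut, assuming an existing SparkSession named spark:

    import org.apache.spark.sql.functions.max

    val df = spark.createDataFrame(Seq(("a", 1), ("a", 5), ("b", 2))).toDF("key", "value")

    // Untyped aggregate via functions.max ...
    df.agg(max("value")).show()
    // ... and the per-group shortcut on RelationalGroupedDataset.
    df.groupBy("key").max("value").show()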
max(Ordering<B>) - Static method in class org.apache.spark.sql.types.StructType
 
max(Duration) - Method in class org.apache.spark.streaming.Duration
 
max(Time) - Method in class org.apache.spark.streaming.Time
 
max(long, long) - Static method in class org.apache.spark.streaming.util.RawTextHelper
 
max() - Method in class org.apache.spark.util.StatCounter
 
MAX_INT_DIGITS() - Static method in class org.apache.spark.sql.types.Decimal
Maximum number of decimal digits an Int can represent
MAX_LONG_DIGITS() - Static method in class org.apache.spark.sql.types.Decimal
Maximum number of decimal digits a Long can represent
MAX_PRECISION() - Static method in class org.apache.spark.sql.types.DecimalType
 
MAX_SCALE() - Static method in class org.apache.spark.sql.types.DecimalType
 
maxAbs() - Method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
MaxAbsScaler - Class in org.apache.spark.ml.feature
:: Experimental :: Rescale each feature individually to the range [-1, 1] by dividing through the maximum absolute value of each feature.
MaxAbsScaler(String) - Constructor for class org.apache.spark.ml.feature.MaxAbsScaler
 
MaxAbsScaler() - Constructor for class org.apache.spark.ml.feature.MaxAbsScaler
 
MaxAbsScalerModel - Class in org.apache.spark.ml.feature
:: Experimental :: Model fitted by MaxAbsScaler.
maxBins() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
maxBins() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
maxBins() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
maxBins() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
maxBins() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
maxBins() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
maxBins() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
maxBins() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
maxBins() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
maxBins() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
maxBins() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
maxBins() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
maxBins() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
maxBufferSizeMb() - Method in class org.apache.spark.serializer.KryoSerializer
 
maxBy(Function1<A, B>, Ordering<B>) - Static method in class org.apache.spark.sql.types.StructType
 
maxCategories() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
maxCategories() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
maxCores() - Method in class org.apache.spark.status.api.v1.ApplicationInfo
 
maxDepth() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
maxDepth() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
maxDepth() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
maxDepth() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
maxDepth() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
maxDepth() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
maxDepth() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
maxDepth() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
maxDepth() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
maxDepth() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
maxDepth() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
maxDepth() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
maxDepth() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
maxId() - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
maxId() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
maxId() - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
maxId() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
maxId() - Static method in class org.apache.spark.rdd.CheckpointState
 
maxId() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
maxId() - Static method in class org.apache.spark.scheduler.TaskLocality
 
maxId() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
maxId() - Static method in class org.apache.spark.TaskState
 
maxIter() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
maxIter() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
maxIter() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
maxIter() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
maxIter() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
maxIter() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
maxIter() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
maxIter() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
maxIter() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
maxIter() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
maxIter() - Static method in class org.apache.spark.ml.clustering.KMeans
 
maxIter() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
maxIter() - Static method in class org.apache.spark.ml.clustering.LDA
 
maxIter() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
maxIter() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
maxIter() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
maxIter() - Static method in class org.apache.spark.ml.recommendation.ALS
 
maxIter() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
maxIter() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
maxIter() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
maxIter() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
maxIter() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
maxIter() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
maxIter() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
maxIter() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
maxIters() - Method in class org.apache.spark.graphx.lib.SVDPlusPlus.Conf
 
maxMem() - Method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
maxMem() - Method in class org.apache.spark.storage.StorageStatus
 
maxMemory() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
maxMemoryInMB() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
maxMemoryInMB() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
maxMemSize() - Method in class org.apache.spark.storage.BlockManagerMessages.RegisterBlockManager
 
maxMessageSizeBytes(SparkConf) - Static method in class org.apache.spark.util.RpcUtils
Returns the configured max message size for messages in bytes.
maxNodesInLevel(int) - Static method in class org.apache.spark.mllib.tree.model.Node
Return the maximum number of nodes which can be in the given level of the tree.
maxTasks() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
maxVal() - Method in class org.apache.spark.graphx.lib.SVDPlusPlus.Conf
 
md5(Column) - Static method in class org.apache.spark.sql.functions
Calculates the MD5 digest of a binary column and returns the value as a 32 character hex string.
mean() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Compute the mean of this RDD's elements.
mean() - Method in class org.apache.spark.ml.feature.StandardScalerModel
 
mean() - Method in class org.apache.spark.ml.stat.distribution.MultivariateGaussian
 
mean() - Method in class org.apache.spark.mllib.feature.StandardScalerModel
 
mean() - Method in class org.apache.spark.mllib.random.ExponentialGenerator
 
mean() - Method in class org.apache.spark.mllib.random.LogNormalGenerator
 
mean() - Method in class org.apache.spark.mllib.random.PoissonGenerator
 
mean() - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
Sample mean of each dimension.
mean() - Method in interface org.apache.spark.mllib.stat.MultivariateStatisticalSummary
Sample mean vector.
mean() - Method in class org.apache.spark.partial.BoundedDouble
 
mean() - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Compute the mean of this RDD's elements.
mean(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the average of the values in a group.
mean(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the average of the values in a group.
mean(String...) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the average value for each numeric column for each group.
mean(Seq<String>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the average value for each numeric column for each group.
mean() - Method in class org.apache.spark.util.StatCounter
 
meanAbsoluteError() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Returns the mean absolute error, which is a risk function corresponding to the expected value of the absolute error loss or l1-norm loss.
meanAbsoluteError() - Method in class org.apache.spark.mllib.evaluation.RegressionMetrics
Returns the mean absolute error, which is a risk function corresponding to the expected value of the absolute error loss or l1-norm loss.
meanApprox(long, Double) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return the approximate mean of the elements in this RDD.
meanApprox(long) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Approximate operation to return the mean within a timeout.
meanApprox(long, double) - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Approximate operation to return the mean within a timeout.
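A brief sketch contrasting the exact and approximate mean on an RDD of doubles, runnable in spark-shell (sc predefined):

    val values = sc.parallelize((1 to 10000).map(_.toDouble))

    // Exact mean: a full pass over the data.
    println(values.mean())

    // Approximate mean: return whatever estimate is available within 100 ms.
    val approx = values.meanApprox(100L, 0.95)
    println(approx.getFinalValue())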
meanAveragePrecision() - Method in class org.apache.spark.mllib.evaluation.RankingMetrics
Returns the mean average precision (MAP) of all the queries.
means() - Method in class org.apache.spark.mllib.clustering.ExpectationSum
 
meanSquaredError() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Returns the mean squared error, which is a risk function corresponding to the expected value of the squared error loss or quadratic loss.
meanSquaredError() - Method in class org.apache.spark.mllib.evaluation.RegressionMetrics
Returns the mean squared error, which is a risk function corresponding to the expected value of the squared error loss or quadratic loss.
megabytesToString(long) - Static method in class org.apache.spark.util.Utils
Convert a quantity in megabytes to a human-readable string such as "4.0 MB".
MEMORY_AND_DISK - Static variable in class org.apache.spark.api.java.StorageLevels
 
MEMORY_AND_DISK() - Static method in class org.apache.spark.storage.StorageLevel
 
MEMORY_AND_DISK_2 - Static variable in class org.apache.spark.api.java.StorageLevels
 
MEMORY_AND_DISK_2() - Static method in class org.apache.spark.storage.StorageLevel
 
MEMORY_AND_DISK_SER - Static variable in class org.apache.spark.api.java.StorageLevels
 
MEMORY_AND_DISK_SER() - Static method in class org.apache.spark.storage.StorageLevel
 
MEMORY_AND_DISK_SER_2 - Static variable in class org.apache.spark.api.java.StorageLevels
 
MEMORY_AND_DISK_SER_2() - Static method in class org.apache.spark.storage.StorageLevel
 
MEMORY_BYTES_SPILLED() - Static method in class org.apache.spark.InternalAccumulator
 
MEMORY_ONLY - Static variable in class org.apache.spark.api.java.StorageLevels
 
MEMORY_ONLY() - Static method in class org.apache.spark.storage.StorageLevel
 
MEMORY_ONLY_2 - Static variable in class org.apache.spark.api.java.StorageLevels
 
MEMORY_ONLY_2() - Static method in class org.apache.spark.storage.StorageLevel
 
MEMORY_ONLY_SER - Static variable in class org.apache.spark.api.java.StorageLevels
 
MEMORY_ONLY_SER() - Static method in class org.apache.spark.storage.StorageLevel
 
MEMORY_ONLY_SER_2 - Static variable in class org.apache.spark.api.java.StorageLevels
 
MEMORY_ONLY_SER_2() - Static method in class org.apache.spark.storage.StorageLevel
 
memoryBytesSpilled() - Method in class org.apache.spark.status.api.v1.ExecutorStageSummary
 
memoryBytesSpilled() - Method in class org.apache.spark.status.api.v1.StageData
 
memoryBytesSpilled() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
memoryBytesSpilled() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
memoryBytesSpilled() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
memoryBytesSpilled() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
MemoryEntry<T> - Interface in org.apache.spark.storage.memory
 
memoryManager() - Method in class org.apache.spark.SparkEnv
 
memoryMode() - Method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
memoryMode() - Method in interface org.apache.spark.storage.memory.MemoryEntry
 
memoryMode() - Method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
MemoryParam - Class in org.apache.spark.util
An extractor object for parsing JVM memory strings, such as "10g", into an Int representing the number of megabytes.
MemoryParam() - Constructor for class org.apache.spark.util.MemoryParam
 
memoryPerExecutorMB() - Method in class org.apache.spark.status.api.v1.ApplicationInfo
 
memoryRemaining() - Method in class org.apache.spark.status.api.v1.RDDDataDistribution
 
memoryStringToMb(String) - Static method in class org.apache.spark.util.Utils
Convert a Java memory parameter passed to -Xmx (such as 300m or 1g) to a number of mebibytes.
memoryUsed() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
memoryUsed() - Method in class org.apache.spark.status.api.v1.RDDDataDistribution
 
memoryUsed() - Method in class org.apache.spark.status.api.v1.RDDPartitionInfo
 
memoryUsed() - Method in class org.apache.spark.status.api.v1.RDDStorageInfo
 
memRemaining() - Method in class org.apache.spark.storage.StorageStatus
Return the memory remaining in this block manager.
memSize() - Method in class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
 
memSize() - Method in class org.apache.spark.storage.BlockStatus
 
memSize() - Method in class org.apache.spark.storage.BlockUpdatedInfo
 
memSize() - Method in class org.apache.spark.storage.RDDInfo
 
memUsed() - Method in class org.apache.spark.storage.StorageStatus
Return the memory used by this block manager.
memUsedByRdd(int) - Method in class org.apache.spark.storage.StorageStatus
Return the memory used by the given RDD in this block manager in O(1) time.
merge(R) - Method in class org.apache.spark.Accumulable
Deprecated.
Merge two accumulable objects.
merge(R) - Static method in class org.apache.spark.Accumulator
Deprecated.
 
merge(LogisticAggregator) - Method in class org.apache.spark.ml.classification.LogisticAggregator
Merge another LogisticAggregator, and update the loss and gradient of the objective function.
merge(AFTAggregator) - Method in class org.apache.spark.ml.regression.AFTAggregator
Merge another AFTAggregator, and update the loss and gradient of the objective function.
merge(LeastSquaresAggregator) - Method in class org.apache.spark.ml.regression.LeastSquaresAggregator
Merge another LeastSquaresAggregator, and update the loss and gradient of the objective function.
merge(IDF.DocumentFrequencyAggregator) - Method in class org.apache.spark.mllib.feature.IDF.DocumentFrequencyAggregator
Merges another DocumentFrequencyAggregator into this one.
merge(MultivariateOnlineSummarizer) - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
Merge another MultivariateOnlineSummarizer, and update the statistical summary.
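A small sketch of building two summaries and merging them (plain Scala, no cluster needed):

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.stat.MultivariateOnlineSummarizer

    val left = new MultivariateOnlineSummarizer()
      .add(Vectors.dense(1.0, 10.0))
      .add(Vectors.dense(2.0, 20.0))
    val right = new MultivariateOnlineSummarizer()
      .add(Vectors.dense(3.0, 30.0))

    // Fold the right-hand summary into the left-hand one.
    val combined = left.merge(right)
    println(combined.mean)  // per-dimension sample mean
    println(combined.max)   // per-dimension maximum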
merge(BUF, BUF) - Method in class org.apache.spark.sql.expressions.Aggregator
Merge two intermediate values.
merge(MutableAggregationBuffer, Row) - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
Merges two aggregation buffers and stores the updated buffer values back to buffer1.
merge(AccumulatorV2<IN, OUT>) - Method in class org.apache.spark.util.AccumulatorV2
Merges another same-type accumulator into this one and updates its state.
merge(AccumulatorV2<Double, Double>) - Method in class org.apache.spark.util.DoubleAccumulator
 
merge(AccumulatorV2<T, R>) - Method in class org.apache.spark.util.LegacyAccumulatorWrapper
 
merge(AccumulatorV2<T, List<T>>) - Method in class org.apache.spark.util.ListAccumulator
 
merge(AccumulatorV2<Long, Long>) - Method in class org.apache.spark.util.LongAccumulator
 
merge(double) - Method in class org.apache.spark.util.StatCounter
Add a value into this StatCounter, updating the internal statistics.
merge(TraversableOnce<Object>) - Method in class org.apache.spark.util.StatCounter
Add multiple values into this StatCounter, updating the internal statistics.
merge(StatCounter) - Method in class org.apache.spark.util.StatCounter
Merge another StatCounter into this one, adding up the internal statistics.
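For instance (plain Scala, no cluster needed):

    import org.apache.spark.util.StatCounter

    val a = StatCounter(1.0, 2.0, 3.0)
    val b = StatCounter(10.0, 20.0)

    // Merge b's statistics into a; count, mean, min, max and variance are all updated.
    a.merge(b)
    println(s"count=${a.count} mean=${a.mean} min=${a.min} max=${a.max}")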
mergeCombiners() - Method in class org.apache.spark.Aggregator
 
mergeInPlace(BloomFilter) - Method in class org.apache.spark.util.sketch.BloomFilter
Combines this bloom filter with another bloom filter by performing a bitwise OR of the underlying data.
mergeInPlace(CountMinSketch) - Method in class org.apache.spark.util.sketch.CountMinSketch
Merges another CountMinSketch with this one in place.
mergeValue() - Method in class org.apache.spark.Aggregator
 
MESOS_REGEX() - Static method in class org.apache.spark.SparkMasterRegex
 
MesosSchedulerBackendUtil - Class in org.apache.spark.scheduler.cluster.mesos
A collection of utility functions which can be used by both the MesosSchedulerBackend and the CoarseMesosSchedulerBackend.
MesosSchedulerBackendUtil() - Constructor for class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
 
message() - Method in class org.apache.spark.FetchFailed
 
message() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutorFailed
 
message() - Static method in class org.apache.spark.scheduler.ExecutorKilled
 
message() - Static method in class org.apache.spark.scheduler.LossReasonPending
 
message() - Method in exception org.apache.spark.sql.AnalysisException
 
message() - Method in exception org.apache.spark.sql.ContinuousQueryException
 
MetaAlgorithmReadWrite - Class in org.apache.spark.ml.util
Default Meta-Algorithm read and write implementation.
MetaAlgorithmReadWrite() - Constructor for class org.apache.spark.ml.util.MetaAlgorithmReadWrite
 
Metadata - Class in org.apache.spark.sql.types
:: DeveloperApi ::
Metadata() - Constructor for class org.apache.spark.sql.types.Metadata
No-arg constructor for kryo.
metadata() - Method in class org.apache.spark.sql.types.StructField
 
metadata() - Method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
METADATA_KEY_DESCRIPTION() - Static method in class org.apache.spark.streaming.scheduler.StreamInputInfo
The key for description in StreamInputInfo.metadata.
MetadataBuilder - Class in org.apache.spark.sql.types
:: DeveloperApi ::
MetadataBuilder() - Constructor for class org.apache.spark.sql.types.MetadataBuilder
 
metadataDescription() - Method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
MetadataUtils - Class in org.apache.spark.ml.util
Helper utilities for algorithms using ML metadata.
MetadataUtils() - Constructor for class org.apache.spark.ml.util.MetadataUtils
 
method() - Method in class org.apache.spark.mllib.stat.test.ChiSqTestResult
 
MethodIdentifier<T> - Class in org.apache.spark.util
Helper class to identify a method.
MethodIdentifier(Class<T>, String, String) - Constructor for class org.apache.spark.util.MethodIdentifier
 
methodName() - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
methodName() - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
metricName() - Method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
Param for metric name in evaluation (supports "areaUnderROC" (default), "areaUnderPR")
metricName() - Method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
Param for metric name in evaluation (supports "f1" (default), "precision", "recall", "weightedPrecision", "weightedRecall", "accuracy")
metricName() - Method in class org.apache.spark.ml.evaluation.RegressionEvaluator
Param for metric name in evaluation.
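As a sketch, the param is normally set through the corresponding setter; the predictions DataFrame and its column names below are hypothetical:

    import org.apache.spark.ml.evaluation.RegressionEvaluator

    // Evaluate with mean absolute error instead of the default RMSE.
    val evaluator = new RegressionEvaluator()
      .setLabelCol("label")
      .setPredictionCol("prediction")
      .setMetricName("mae")

    // predictions: a DataFrame with "label" and "prediction" columns -- assumed to exist.
    println(evaluator.evaluate(predictions))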
metrics() - Method in class org.apache.spark.ui.jobs.UIData.TaskUIData
 
METRICS_PREFIX() - Static method in class org.apache.spark.InternalAccumulator
 
metricsSystem() - Method in class org.apache.spark.SparkEnv
 
MFDataGenerator - Class in org.apache.spark.mllib.util
:: DeveloperApi :: Generate RDD(s) containing data for Matrix Factorization.
MFDataGenerator() - Constructor for class org.apache.spark.mllib.util.MFDataGenerator
 
microF1Measure() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns micro-averaged label-based f1-measure (equal to micro-averaged document-based f1-measure).
microPrecision() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns micro-averaged label-based precision (equal to micro-averaged document-based precision).
microRecall() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns micro-averaged label-based recall (equal to micro-averaged document-based recall).
mightContain(Object) - Method in class org.apache.spark.util.sketch.BloomFilter
Returns true if the element might have been put in this Bloom filter, false if this is definitely not the case.
mightContainBinary(byte[]) - Method in class org.apache.spark.util.sketch.BloomFilter
A specialized variant of BloomFilter.mightContain(Object) that only tests byte array items.
mightContainLong(long) - Method in class org.apache.spark.util.sketch.BloomFilter
A specialized variant of BloomFilter.mightContain(Object) that only tests long items.
mightContainString(String) - Method in class org.apache.spark.util.sketch.BloomFilter
A specialized variant of BloomFilter.mightContain(Object) that only tests String items.
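A compact sketch covering put, the typed mightContain variants, and mergeInPlace (plain JVM code, no cluster needed):

    import org.apache.spark.util.sketch.BloomFilter

    // Two filters sized for ~1000 items each, with a 3% false-positive rate.
    val left = BloomFilter.create(1000, 0.03)
    val right = BloomFilter.create(1000, 0.03)
    left.putString("spark")
    right.putLong(42L)

    // Combine right into left via a bitwise OR of the underlying bits.
    left.mergeInPlace(right)

    println(left.mightContainString("spark"))  // true
    println(left.mightContainLong(42L))        // true
    println(left.mightContainString("flink"))  // false with high probability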
milliseconds() - Method in class org.apache.spark.streaming.Duration
 
milliseconds(long) - Static method in class org.apache.spark.streaming.Durations
 
Milliseconds - Class in org.apache.spark.streaming
Helper object that creates instances of Duration representing a given number of milliseconds.
Milliseconds() - Constructor for class org.apache.spark.streaming.Milliseconds
 
milliseconds() - Method in class org.apache.spark.streaming.Time
 
millisToString(long) - Static method in class org.apache.spark.scheduler.StatsReportListener
Reformat a time interval in milliseconds to a prettier format for output.
min() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Returns the minimum element from this RDD as defined by the default comparator (natural order).
min(Comparator<T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
min(Comparator<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
min(Comparator<T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Returns the minimum element from this RDD as defined by the specified Comparator[T].
min(Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
min(Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
min(Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
min(Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
min(Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
MIN() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
min() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
min() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
min() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
min() - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
Minimum value of each dimension.
min() - Method in interface org.apache.spark.mllib.stat.MultivariateStatisticalSummary
Minimum value of each column.
min(Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
min(Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
min(Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
min(Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
min(Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Returns the min of this RDD as defined by the implicit Ordering[T].
min(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the minimum value of the expression in a group.
min(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the minimum value of the column in a group.
min(String...) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the min value for each numeric column for each group.
min(Seq<String>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the min value for each numeric column for each group.
min(Ordering<B>) - Static method in class org.apache.spark.sql.types.StructType
 
min(Duration) - Method in class org.apache.spark.streaming.Duration
 
min(Time) - Method in class org.apache.spark.streaming.Time
 
min() - Method in class org.apache.spark.util.StatCounter
 
minBy(Function1<A, B>, Ordering<B>) - Static method in class org.apache.spark.sql.types.StructType
 
minCount() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
minCount() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
minDF() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
minDF() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
minDivisibleClusterSize() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
minDivisibleClusterSize() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
minDocFreq() - Static method in class org.apache.spark.ml.feature.IDF
 
minDocFreq() - Static method in class org.apache.spark.ml.feature.IDFModel
 
minDocFreq() - Method in class org.apache.spark.mllib.feature.IDF.DocumentFrequencyAggregator
 
minDocFreq() - Method in class org.apache.spark.mllib.feature.IDF
 
minInfoGain() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
minInfoGain() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
minInfoGain() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
minInfoGain() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
minInfoGain() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
minInfoGain() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
minInfoGain() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
minInfoGain() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
minInfoGain() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
minInfoGain() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
minInfoGain() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
minInfoGain() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
minInfoGain() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
minInstancesPerNode() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
minInstancesPerNode() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
MinMax() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
MinMaxScaler - Class in org.apache.spark.ml.feature
:: Experimental :: Rescale each feature individually to a common range [min, max] linearly using column summary statistics, which is also known as min-max normalization or Rescaling.
MinMaxScaler(String) - Constructor for class org.apache.spark.ml.feature.MinMaxScaler
 
MinMaxScaler() - Constructor for class org.apache.spark.ml.feature.MinMaxScaler
 
MinMaxScalerModel - Class in org.apache.spark.ml.feature
:: Experimental :: Model fitted by MinMaxScaler.
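A short fit-and-transform sketch, assuming an existing SparkSession named spark; the toy feature vectors are made up:

    import org.apache.spark.ml.feature.MinMaxScaler
    import org.apache.spark.ml.linalg.Vectors

    val data = spark.createDataFrame(Seq(
      (0, Vectors.dense(1.0, 0.1)),
      (1, Vectors.dense(2.0, 1.1)),
      (2, Vectors.dense(3.0, 10.1))
    )).toDF("id", "features")

    // Rescale every feature into [0, 1] using column-wise min/max statistics.
    val scaler = new MinMaxScaler()
      .setInputCol("features")
      .setOutputCol("scaledFeatures")

    val model = scaler.fit(data)  // a MinMaxScalerModel
    model.transform(data).show(false)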
minSamplingRate() - Static method in class org.apache.spark.util.random.BinomialBounds
 
minTF() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
minTF() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
minTokenLength() - Method in class org.apache.spark.ml.feature.RegexTokenizer
Minimum token length, >= 0.
minus(RDD<Tuple2<Object, VD>>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
minus(VertexRDD<VD>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
minus(RDD<Tuple2<Object, VD>>) - Method in class org.apache.spark.graphx.VertexRDD
For each VertexId present in both this and other, minus will act as a set difference operation, returning only those unique VertexIds present in this.
minus(VertexRDD<VD>) - Method in class org.apache.spark.graphx.VertexRDD
For each VertexId present in both this and other, minus will act as a set difference operation, returning only those unique VertexIds present in this.
minus(Object) - Method in class org.apache.spark.sql.Column
Subtraction.
minus(Duration) - Method in class org.apache.spark.streaming.Duration
 
minus(Time) - Method in class org.apache.spark.streaming.Time
 
minus(Duration) - Method in class org.apache.spark.streaming.Time
 
minute(Column) - Static method in class org.apache.spark.sql.functions
Extracts the minutes as an integer from a given date/timestamp/string.
minutes() - Static method in class org.apache.spark.scheduler.StatsReportListener
 
minutes(long) - Static method in class org.apache.spark.streaming.Durations
 
Minutes - Class in org.apache.spark.streaming
Helper object that creates instances of Duration representing a given number of minutes.
Minutes() - Constructor for class org.apache.spark.streaming.Minutes
 
minVal() - Method in class org.apache.spark.graphx.lib.SVDPlusPlus.Conf
 
missingInput() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
mkList() - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
mkString() - Method in interface org.apache.spark.sql.Row
Displays all elements of this sequence in a string (without a separator).
mkString(String) - Method in interface org.apache.spark.sql.Row
Displays all elements of this sequence in a string using a separator string.
mkString(String, String, String) - Method in interface org.apache.spark.sql.Row
Displays all elements of this traversable or iterator in a string using start, end, and separator strings.
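For example, against the first row of a hypothetical DataFrame df:

    // df is assumed to exist; its first row is used for illustration.
    val firstRow = df.head()
    println(firstRow.mkString(", "))
    println(firstRow.mkString("[", " | ", "]"))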
mkString(String, String, String) - Static method in class org.apache.spark.sql.types.StructType
 
mkString(String) - Static method in class org.apache.spark.sql.types.StructType
 
mkString() - Static method in class org.apache.spark.sql.types.StructType
 
ML_ATTR() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
mlDenseMatrixToMLlibDenseMatrix(DenseMatrix) - Static method in class org.apache.spark.mllib.linalg.MatrixImplicits
 
mlDenseVectorToMLlibDenseVector(DenseVector) - Static method in class org.apache.spark.mllib.linalg.VectorImplicits
 
mllibDenseMatrixToMLDenseMatrix(DenseMatrix) - Static method in class org.apache.spark.mllib.linalg.MatrixImplicits
 
mllibDenseVectorToMLDenseVector(DenseVector) - Static method in class org.apache.spark.mllib.linalg.VectorImplicits
 
mllibMatrixToMLMatrix(Matrix) - Static method in class org.apache.spark.mllib.linalg.MatrixImplicits
 
mllibSparseMatrixToMLSparseMatrix(SparseMatrix) - Static method in class org.apache.spark.mllib.linalg.MatrixImplicits
 
mllibSparseVectorToMLSparseVector(SparseVector) - Static method in class org.apache.spark.mllib.linalg.VectorImplicits
 
mllibVectorToMLVector(Vector) - Static method in class org.apache.spark.mllib.linalg.VectorImplicits
 
mlMatrixToMLlibMatrix(Matrix) - Static method in class org.apache.spark.mllib.linalg.MatrixImplicits
 
MLPairRDDFunctions<K,V> - Class in org.apache.spark.mllib.rdd
Machine learning specific Pair RDD functions.
MLPairRDDFunctions(RDD<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>) - Constructor for class org.apache.spark.mllib.rdd.MLPairRDDFunctions
 
MLReadable<T> - Interface in org.apache.spark.ml.util
Trait for objects that provide MLReader.
MLReader<T> - Class in org.apache.spark.ml.util
Abstract class for utility classes that can load ML instances.
MLReader() - Constructor for class org.apache.spark.ml.util.MLReader
 
mlSparseMatrixToMLlibSparseMatrix(SparseMatrix) - Static method in class org.apache.spark.mllib.linalg.MatrixImplicits
 
mlSparseVectorToMLlibSparseVector(SparseVector) - Static method in class org.apache.spark.mllib.linalg.VectorImplicits
 
MLUtils - Class in org.apache.spark.mllib.util
Helper methods to load, save and pre-process data used in MLlib.
MLUtils() - Constructor for class org.apache.spark.mllib.util.MLUtils
 
mlVectorToMLlibVector(Vector) - Static method in class org.apache.spark.mllib.linalg.VectorImplicits
 
MLWritable - Interface in org.apache.spark.ml.util
Trait for classes that provide MLWriter.
MLWriter - Class in org.apache.spark.ml.util
Abstract class for utility classes that can save ML instances.
MLWriter() - Constructor for class org.apache.spark.ml.util.MLWriter
 
mod(Object) - Method in class org.apache.spark.sql.Column
Modulo (a.k.a. remainder) expression.
mode(SaveMode) - Method in class org.apache.spark.sql.DataFrameWriter
Specifies the behavior when data or table already exists.
mode(String) - Method in class org.apache.spark.sql.DataFrameWriter
Specifies the behavior when data or table already exists.
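A small sketch of both overloads, assuming an existing DataFrame df and a writable output path:

    import org.apache.spark.sql.SaveMode

    // Enum form: replace the target if it already exists.
    df.write.mode(SaveMode.Overwrite).parquet("/tmp/example-output")

    // String form: equivalent, using the case-insensitive name.
    df.write.mode("overwrite").parquet("/tmp/example-output")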
Model<M extends Model<M>> - Class in org.apache.spark.ml
:: DeveloperApi :: A fitted model, i.e., a Transformer produced by an Estimator.
Model() - Constructor for class org.apache.spark.ml.Model
 
model() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
Private copy of model to ensure Params are not modified outside this class.
model() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Deprecated.
The model field is deprecated and will be removed in 2.1.0. Since 2.0.0.
model() - Method in class org.apache.spark.mllib.classification.StreamingLogisticRegressionWithSGD
 
model() - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
model() - Method in class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
The model to be updated and used for prediction.
model() - Method in class org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
 
models() - Method in class org.apache.spark.ml.classification.OneVsRestModel
 
modelType() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
modelType() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
modelType() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
 
modelType() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$.Data
 
MODULE$ - Static variable in class org.apache.spark.AccumulatorParam.DoubleAccumulatorParam$
Deprecated.
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.AccumulatorParam.FloatAccumulatorParam$
Deprecated.
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.AccumulatorParam.IntAccumulatorParam$
Deprecated.
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.AccumulatorParam.LongAccumulatorParam$
Deprecated.
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.AccumulatorParam.StringAccumulatorParam$
Deprecated.
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.graphx.PartitionStrategy.CanonicalRandomVertexCut$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.graphx.PartitionStrategy.EdgePartition1D$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.graphx.PartitionStrategy.EdgePartition2D$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.graphx.PartitionStrategy.RandomVertexCut$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.InternalAccumulator.input$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.InternalAccumulator.output$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.InternalAccumulator.shuffleRead$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.InternalAccumulator.shuffleWrite$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.Pipeline.SharedReadWrite$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.recommendation.ALS.InBlock$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.recommendation.ALS.Rating$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.recommendation.ALS.RatingBlock$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Binomial$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.CLogLog$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Family$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gamma$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gaussian$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Identity$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Inverse$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Link$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Log$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Logit$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Poisson$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Probit$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Sqrt$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.SplitData$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ml.tree.EnsembleModelReadWrite.EnsembleNodeData$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.classification.impl.GLMClassificationModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.clustering.BisectingKMeansModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.clustering.KMeansModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.clustering.PowerIterationClustering.Assignment$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.clustering.PowerIterationClusteringModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.feature.ChiSqSelectorModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.fpm.FPGrowthModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.fpm.PrefixSpan.Postfix$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.fpm.PrefixSpan.Prefix$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.fpm.PrefixSpanModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.regression.impl.GLMRegressionModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.stat.test.ChiSqTest.Method$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.stat.test.ChiSqTest.NullHypothesis$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest.NullHypothesis$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.rdd.HadoopRDD.HadoopMapPartitionsWithSplitRDD$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.rdd.NewHadoopRDD.NewHadoopMapPartitionsWithSplitRDD$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.AddWebUIFilter$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.GetExecutorLossReason$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillExecutors$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillTask$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.LaunchTask$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterClusterManager$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisteredExecutor$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutor$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RegisterExecutorFailed$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RemoveExecutor$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RequestExecutors$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RetrieveLastAllocatedExecutorId$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RetrieveSparkProps$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.ReviveOffers$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.SetupDriver$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.Shutdown$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StatusUpdate$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StopDriver$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StopExecutor$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StopExecutors$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.sql.hive.HiveShim.HiveFunctionWrapper$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.sql.RelationalGroupedDataset.CubeType$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.sql.RelationalGroupedDataset.GroupByType$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.sql.RelationalGroupedDataset.PivotType$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.sql.RelationalGroupedDataset.RollupType$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.sql.types.Decimal.DecimalAsIfIntegral$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.sql.types.Decimal.DecimalIsFractional$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.sql.types.DecimalType.Expression$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.sql.types.DecimalType.Fixed$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.BlockManagerHeartbeat$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.GetBlockStatus$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.GetExecutorEndpointRef$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.GetLocations$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.GetLocationsMultipleBlockIds$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.GetMatchingBlockIds$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.GetMemoryStatus$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.GetPeers$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.GetStorageStatus$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.HasCachedBlocks$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.RegisterBlockManager$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.RemoveBlock$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.RemoveBroadcast$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.RemoveExecutor$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.RemoveRdd$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.RemoveShuffle$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.StopBlockManagerMaster$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.TriggerThreadDump$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.streaming.kafka.KafkaCluster.LeaderOffset$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.streaming.kafka.KafkaCluster.SimpleConsumerConfig$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ui.JettyUtils.ServletParams$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ui.jobs.UIData.ExecutorUIData$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ui.jobs.UIData.JobUIData$
Static reference to the singleton instance of this Scala object.
MODULE$ - Static variable in class org.apache.spark.ui.jobs.UIData.TaskUIData$
Static reference to the singleton instance of this Scala object.
monotonically_increasing_id() - Static method in class org.apache.spark.sql.functions
A column expression that generates monotonically increasing 64-bit integers.
monotonicallyIncreasingId() - Static method in class org.apache.spark.sql.functions
A column expression that generates monotonically increasing 64-bit integers.
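For illustration, a minimal sketch of attaching such an id column (the DataFrame `df` and the column name "row_id" are assumptions, not part of this index):

    import org.apache.spark.sql.functions.monotonically_increasing_id

    // Each row gets a unique, monotonically increasing (but not consecutive) 64-bit id.
    val withId = df.withColumn("row_id", monotonically_increasing_id())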
month(Column) - Static method in class org.apache.spark.sql.functions
Extracts the month as an integer from a given date/timestamp/string.
months_between(Column, Column) - Static method in class org.apache.spark.sql.functions
 
msDurationToString(long) - Static method in class org.apache.spark.util.Utils
Returns a human-readable string representing a duration such as "35ms"
MsSqlServerDialect - Class in org.apache.spark.sql.jdbc
 
MsSqlServerDialect() - Constructor for class org.apache.spark.sql.jdbc.MsSqlServerDialect
 
mu() - Method in class org.apache.spark.mllib.stat.distribution.MultivariateGaussian
 
MulticlassClassificationEvaluator - Class in org.apache.spark.ml.evaluation
:: Experimental :: Evaluator for multiclass classification, which expects two input columns: score and label.
MulticlassClassificationEvaluator(String) - Constructor for class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
MulticlassClassificationEvaluator() - Constructor for class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
MulticlassMetrics - Class in org.apache.spark.mllib.evaluation
:: Experimental :: Evaluator for multiclass classification.
MulticlassMetrics(RDD<Tuple2<Object, Object>>) - Constructor for class org.apache.spark.mllib.evaluation.MulticlassMetrics
 
MultilabelMetrics - Class in org.apache.spark.mllib.evaluation
Evaluator for multilabel classification.
MultilabelMetrics(RDD<Tuple2<double[], double[]>>) - Constructor for class org.apache.spark.mllib.evaluation.MultilabelMetrics
 
multiLabelValidator(int) - Static method in class org.apache.spark.mllib.util.DataValidators
Function to check if labels used for k class multi-label classification are in the range of {0, 1, ..., k - 1}.
MultilayerPerceptronClassificationModel - Class in org.apache.spark.ml.classification
:: Experimental :: Classification model based on the Multilayer Perceptron.
MultilayerPerceptronClassifier - Class in org.apache.spark.ml.classification
:: Experimental :: Classifier trainer based on the Multilayer Perceptron.
MultilayerPerceptronClassifier(String) - Constructor for class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
MultilayerPerceptronClassifier() - Constructor for class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
multiply(DenseMatrix) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
 
multiply(DenseVector) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
 
multiply(Vector) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
 
multiply(DenseMatrix) - Method in interface org.apache.spark.ml.linalg.Matrix
Convenience method for `Matrix`-`DenseMatrix` multiplication.
multiply(DenseVector) - Method in interface org.apache.spark.ml.linalg.Matrix
Convenience method for `Matrix`-`DenseVector` multiplication.
multiply(Vector) - Method in interface org.apache.spark.ml.linalg.Matrix
Convenience method for `Matrix`-`Vector` multiplication.
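A small sketch of these convenience multiplications on a local ml.linalg matrix (the concrete values are illustrative only):

    import org.apache.spark.ml.linalg.{DenseMatrix, Vectors}

    // Values are stored column-major: this is the 2x2 matrix [[1 2], [3 4]].
    val m = new DenseMatrix(2, 2, Array(1.0, 3.0, 2.0, 4.0))
    val v = Vectors.dense(1.0, 1.0)
    val mv = m.multiply(v)                   // Matrix-Vector product: DenseVector(3.0, 7.0)
    val mm = m.multiply(DenseMatrix.eye(2))  // Matrix-DenseMatrix product: m unchanged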
multiply(DenseMatrix) - Static method in class org.apache.spark.ml.linalg.SparseMatrix
 
multiply(DenseVector) - Static method in class org.apache.spark.ml.linalg.SparseMatrix
 
multiply(Vector) - Static method in class org.apache.spark.ml.linalg.SparseMatrix
 
multiply(DenseMatrix) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
 
multiply(DenseVector) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
 
multiply(Vector) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
 
multiply(BlockMatrix) - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Left multiplies this BlockMatrix to other, another BlockMatrix.
multiply(Matrix) - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
Multiply this matrix by a local matrix on the right.
multiply(Matrix) - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Multiply this matrix by a local matrix on the right.
multiply(DenseMatrix) - Method in interface org.apache.spark.mllib.linalg.Matrix
Convenience method for `Matrix`-`DenseMatrix` multiplication.
multiply(DenseVector) - Method in interface org.apache.spark.mllib.linalg.Matrix
Convenience method for `Matrix`-`DenseVector` multiplication.
multiply(Vector) - Method in interface org.apache.spark.mllib.linalg.Matrix
Convenience method for `Matrix`-`Vector` multiplication.
multiply(DenseMatrix) - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
 
multiply(DenseVector) - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
 
multiply(Vector) - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
 
multiply(Object) - Method in class org.apache.spark.sql.Column
Multiplication of this expression and another expression.
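A brief sketch of the Column.multiply form, which is convenient from Java (the DataFrame `df` and its columns are assumptions; Scala users would typically write the `*` operator instead):

    val withTotal = df.withColumn("total", df("price").multiply(df("quantity")))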
MultivariateGaussian - Class in org.apache.spark.ml.stat.distribution
This class provides basic functionality for a Multivariate Gaussian (Normal) Distribution.
MultivariateGaussian(Vector, Matrix) - Constructor for class org.apache.spark.ml.stat.distribution.MultivariateGaussian
 
MultivariateGaussian - Class in org.apache.spark.mllib.stat.distribution
:: DeveloperApi :: This class provides basic functionality for a Multivariate Gaussian (Normal) Distribution.
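As a minimal sketch of constructing and evaluating the mllib distribution (the mean, covariance, and query point are made-up example values):

    import org.apache.spark.mllib.linalg.{Matrices, Vectors}
    import org.apache.spark.mllib.stat.distribution.MultivariateGaussian

    val gaussian = new MultivariateGaussian(
      Vectors.dense(0.0, 0.0),                          // mean vector mu
      Matrices.dense(2, 2, Array(1.0, 0.0, 0.0, 1.0)))  // covariance sigma (identity)
    val density = gaussian.pdf(Vectors.dense(0.5, 0.5)) // density at an example point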
MultivariateGaussian(Vector, Matrix) - Constructor for class org.apache.spark.mllib.stat.distribution.MultivariateGaussian
 
MultivariateOnlineSummarizer - Class in org.apache.spark.mllib.stat
:: DeveloperApi :: MultivariateOnlineSummarizer implements MultivariateStatisticalSummary to compute the mean, variance, minimum, maximum, counts, and nonzero counts for instances in sparse or dense vector format in an online fashion.
MultivariateOnlineSummarizer() - Constructor for class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
 
MultivariateStatisticalSummary - Interface in org.apache.spark.mllib.stat
Trait for multivariate statistical summary of a data matrix.
MutableAggregationBuffer - Class in org.apache.spark.sql.expressions
:: Experimental :: A Row representing a mutable aggregation buffer.
MutableAggregationBuffer() - Constructor for class org.apache.spark.sql.expressions.MutableAggregationBuffer
 
MutablePair<T1,T2> - Class in org.apache.spark.util
:: DeveloperApi :: A tuple of 2 elements.
MutablePair(T1, T2) - Constructor for class org.apache.spark.util.MutablePair
 
MutablePair() - Constructor for class org.apache.spark.util.MutablePair
No-arg constructor for serialization
myName() - Method in class org.apache.spark.util.InnerClosureFinder
 
MySQLDialect - Class in org.apache.spark.sql.jdbc
 
MySQLDialect() - Constructor for class org.apache.spark.sql.jdbc.MySQLDialect
 

N

n() - Method in class org.apache.spark.ml.feature.NGram
Minimum n-gram length, >= 1.
n() - Method in class org.apache.spark.mllib.optimization.NNLS.Workspace
 
na() - Method in class org.apache.spark.sql.Dataset
Returns a DataFrameNaFunctions for working with missing data.
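For illustration, a short sketch of the returned DataFrameNaFunctions in use (the DataFrame `df` and the "price" column are assumptions):

    val cleaned = df.na.drop()                   // drop rows containing any null or NaN value
    val filled  = df.na.fill(0.0, Seq("price"))  // replace nulls/NaNs in "price" with 0.0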
NaiveBayes - Class in org.apache.spark.ml.classification
:: Experimental :: Naive Bayes Classifiers.
NaiveBayes(String) - Constructor for class org.apache.spark.ml.classification.NaiveBayes
 
NaiveBayes() - Constructor for class org.apache.spark.ml.classification.NaiveBayes
 
NaiveBayes - Class in org.apache.spark.mllib.classification
Trains a Naive Bayes model given an RDD of (label, features) pairs.
NaiveBayes(double) - Constructor for class org.apache.spark.mllib.classification.NaiveBayes
 
NaiveBayes() - Constructor for class org.apache.spark.mllib.classification.NaiveBayes
 
NaiveBayesModel - Class in org.apache.spark.ml.classification
:: Experimental :: Model produced by NaiveBayes. param: pi, log of class priors, whose dimension is C (number of classes). param: theta, log of class conditional probabilities, whose dimension is C (number of classes) by D (number of features).
NaiveBayesModel - Class in org.apache.spark.mllib.classification
Model for Naive Bayes Classifiers.
NaiveBayesModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.classification
 
NaiveBayesModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$
 
NaiveBayesModel.SaveLoadV1_0$.Data - Class in org.apache.spark.mllib.classification
Model data for model import/export
NaiveBayesModel.SaveLoadV1_0$.Data(double[], double[], double[][]) - Constructor for class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$.Data
 
NaiveBayesModel.SaveLoadV2_0$ - Class in org.apache.spark.mllib.classification
 
NaiveBayesModel.SaveLoadV2_0$() - Constructor for class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$
 
NaiveBayesModel.SaveLoadV2_0$.Data - Class in org.apache.spark.mllib.classification
Model data for model import/export
NaiveBayesModel.SaveLoadV2_0$.Data(double[], double[], double[][], String) - Constructor for class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$.Data
 
name() - Method in class org.apache.spark.Accumulable
Deprecated.
 
name() - Static method in class org.apache.spark.Accumulator
Deprecated.
 
name() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
name() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
name() - Static method in class org.apache.spark.api.java.JavaRDD
 
name() - Method in interface org.apache.spark.api.java.JavaRDDLike
 
name() - Static method in class org.apache.spark.api.r.RRDD
 
name() - Static method in class org.apache.spark.graphx.EdgeRDD
 
name() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
name() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
name() - Static method in class org.apache.spark.graphx.VertexRDD
 
name() - Method in class org.apache.spark.ml.attribute.Attribute
Name of the attribute.
name() - Method in class org.apache.spark.ml.attribute.AttributeGroup
 
NAME() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
name() - Method in class org.apache.spark.ml.attribute.AttributeType
 
name() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
name() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
name() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
name() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
name() - Static method in class org.apache.spark.ml.param.DoubleParam
 
name() - Static method in class org.apache.spark.ml.param.FloatParam
 
name() - Method in class org.apache.spark.ml.param.Param
 
name() - Method in class org.apache.spark.mllib.stat.test.ChiSqTest.Method
 
name() - Static method in class org.apache.spark.rdd.HadoopRDD
 
name() - Static method in class org.apache.spark.rdd.JdbcRDD
 
name() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
name() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
name() - Method in class org.apache.spark.rdd.RDD
A friendly name for this RDD
name() - Method in class org.apache.spark.scheduler.AccumulableInfo
 
name() - Method in class org.apache.spark.scheduler.StageInfo
 
name() - Method in interface org.apache.spark.SparkStageInfo
 
name() - Method in class org.apache.spark.SparkStageInfoImpl
 
name() - Method in class org.apache.spark.sql.catalog.Column
 
name() - Method in class org.apache.spark.sql.catalog.Database
 
name() - Method in class org.apache.spark.sql.catalog.Function
 
name() - Method in class org.apache.spark.sql.catalog.Table
 
name(String) - Method in class org.apache.spark.sql.Column
Gives the column a name (alias).
name() - Method in interface org.apache.spark.sql.ContinuousQuery
Returns the name of the query.
name(String) - Method in class org.apache.spark.sql.TypedColumn
Gives the TypedColumn a name (alias).
name() - Method in class org.apache.spark.sql.types.StructField
 
name() - Method in class org.apache.spark.status.api.v1.AccumulableInfo
 
name() - Method in class org.apache.spark.status.api.v1.ApplicationInfo
 
name() - Method in class org.apache.spark.status.api.v1.JobData
 
name() - Method in class org.apache.spark.status.api.v1.RDDStorageInfo
 
name() - Method in class org.apache.spark.status.api.v1.StageData
 
name() - Method in class org.apache.spark.storage.BlockId
A globally unique identifier for this Block.
name() - Method in class org.apache.spark.storage.BroadcastBlockId
 
name() - Method in class org.apache.spark.storage.RDDBlockId
 
name() - Method in class org.apache.spark.storage.RDDInfo
 
name() - Method in class org.apache.spark.storage.ShuffleBlockId
 
name() - Method in class org.apache.spark.storage.ShuffleDataBlockId
 
name() - Method in class org.apache.spark.storage.ShuffleIndexBlockId
 
name() - Method in class org.apache.spark.storage.StreamBlockId
 
name() - Method in class org.apache.spark.storage.TaskResultBlockId
 
name() - Method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
name() - Method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
name() - Method in class org.apache.spark.util.AccumulatorV2
Returns the name of this accumulator; it can only be called after registration.
name() - Method in class org.apache.spark.util.MethodIdentifier
 
name_$eq(String) - Static method in class org.apache.spark.api.r.RRDD
 
name_$eq(String) - Static method in class org.apache.spark.graphx.EdgeRDD
 
name_$eq(String) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
name_$eq(String) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
name_$eq(String) - Static method in class org.apache.spark.graphx.VertexRDD
 
name_$eq(String) - Static method in class org.apache.spark.rdd.HadoopRDD
 
name_$eq(String) - Static method in class org.apache.spark.rdd.JdbcRDD
 
name_$eq(String) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
name_$eq(String) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
namedThreadFactory(String) - Static method in class org.apache.spark.util.ThreadUtils
Create a thread factory that names threads with a prefix and also sets the threads to daemon.
names() - Method in class org.apache.spark.ml.feature.VectorSlicer
An array of feature names to select features from a vector column.
nameToObjectMap() - Static method in class org.apache.spark.mllib.stat.correlation.CorrelationNames
 
nanSafeCompareDoubles(double, double) - Static method in class org.apache.spark.util.Utils
NaN-safe version of java.lang.Double.compare() which allows NaN values to be compared according to semantics where NaN == NaN and NaN > any non-NaN double.
nanSafeCompareFloats(float, float) - Static method in class org.apache.spark.util.Utils
NaN-safe version of java.lang.Float.compare() which allows NaN values to be compared according to semantics where NaN == NaN and NaN > any non-NaN float.
nanvl(Column, Column) - Static method in class org.apache.spark.sql.functions
Returns col1 if it is not NaN, or col2 if col1 is NaN.
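A minimal sketch of nanvl used to clean NaN readings (the DataFrame `df` and the "reading" column are assumptions):

    import org.apache.spark.sql.functions.{col, lit, nanvl}

    // Replace NaN values in "reading" with 0.0, keeping non-NaN values unchanged.
    val fixed = df.select(nanvl(col("reading"), lit(0.0)).name("reading"))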
NarrowDependency<T> - Class in org.apache.spark
:: DeveloperApi :: Base class for dependencies where each partition of the child RDD depends on a small number of partitions of the parent RDD.
NarrowDependency(RDD<T>) - Constructor for class org.apache.spark.NarrowDependency
 
ndcgAt(int) - Method in class org.apache.spark.mllib.evaluation.RankingMetrics
Compute the average NDCG value of all the queries, truncated at ranking position k.
needConversion() - Method in class org.apache.spark.sql.sources.BaseRelation
Whether it needs to convert the objects in a Row to the internal representation, for example: java.lang.String -> UTF8String, java.lang.Decimal -> Decimal.
negate(Column) - Static method in class org.apache.spark.sql.functions
Unary minus, i.e. negate the expression.
newAPIHadoopFile(String, Class<F>, Class<K>, Class<V>, Configuration) - Method in class org.apache.spark.api.java.JavaSparkContext
Get an RDD for a given Hadoop file with an arbitrary new API InputFormat and extra configuration options to pass to the input format.
newAPIHadoopFile(String, ClassTag<K>, ClassTag<V>, ClassTag<F>) - Method in class org.apache.spark.SparkContext
Get an RDD for a Hadoop file with an arbitrary new API InputFormat.
newAPIHadoopFile(String, Class<F>, Class<K>, Class<V>, Configuration) - Method in class org.apache.spark.SparkContext
Get an RDD for a given Hadoop file with an arbitrary new API InputFormat and extra configuration options to pass to the input format.
newAPIHadoopRDD(Configuration, Class<F>, Class<K>, Class<V>) - Method in class org.apache.spark.api.java.JavaSparkContext
Get an RDD for a given Hadoop file with an arbitrary new API InputFormat and extra configuration options to pass to the input format.
newAPIHadoopRDD(Configuration, Class<F>, Class<K>, Class<V>) - Method in class org.apache.spark.SparkContext
Get an RDD for a given Hadoop file with an arbitrary new API InputFormat and extra configuration options to pass to the input format.
newBooleanArrayEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBooleanEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBooleanSeqEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBoxedBooleanEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBoxedByteEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBoxedDoubleEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBoxedFloatEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBoxedIntEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBoxedLongEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBoxedShortEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newBuilder() - Static method in class org.apache.spark.sql.types.StructType
 
newByteArrayEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newByteEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newByteSeqEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newClientForExecution(SparkConf, Configuration) - Static method in class org.apache.spark.sql.hive.HiveUtils
Create a HiveClient used for execution.
newClientForMetadata(SparkConf, Configuration) - Static method in class org.apache.spark.sql.hive.HiveUtils
Create a HiveClient used to retrieve metadata from the Hive MetaStore.
newClientForMetadata(SparkConf, Configuration, Map<String, String>) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
newDaemonCachedThreadPool(String) - Static method in class org.apache.spark.util.ThreadUtils
Wrapper over newCachedThreadPool.
newDaemonCachedThreadPool(String, int, int) - Static method in class org.apache.spark.util.ThreadUtils
Create a cached thread pool whose max number of threads is maxThreadNumber.
newDaemonFixedThreadPool(int, String) - Static method in class org.apache.spark.util.ThreadUtils
Wrapper over newFixedThreadPool.
newDaemonSingleThreadExecutor(String) - Static method in class org.apache.spark.util.ThreadUtils
Wrapper over newSingleThreadExecutor.
newDaemonSingleThreadScheduledExecutor(String) - Static method in class org.apache.spark.util.ThreadUtils
Wrapper over ScheduledThreadPoolExecutor.
newDoubleArrayEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newDoubleEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newDoubleSeqEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newFloatArrayEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newFloatEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newFloatSeqEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newForkJoinPool(String, int) - Static method in class org.apache.spark.util.ThreadUtils
Construct a new Scala ForkJoinPool with a specified max parallelism and name prefix.
NewHadoopRDD<K,V> - Class in org.apache.spark.rdd
:: DeveloperApi :: An RDD that provides core functionality for reading data stored in Hadoop (e.g., files in HDFS, sources in HBase, or S3), using the new MapReduce API (org.apache.hadoop.mapreduce).
NewHadoopRDD(SparkContext, Class<? extends InputFormat<K, V>>, Class<K>, Class<V>, Configuration) - Constructor for class org.apache.spark.rdd.NewHadoopRDD
 
NewHadoopRDD.NewHadoopMapPartitionsWithSplitRDD$ - Class in org.apache.spark.rdd
 
NewHadoopRDD.NewHadoopMapPartitionsWithSplitRDD$() - Constructor for class org.apache.spark.rdd.NewHadoopRDD.NewHadoopMapPartitionsWithSplitRDD$
 
newId() - Static method in class org.apache.spark.util.AccumulatorContext
Returns a globally unique ID for a new AccumulatorV2.
newInstance() - Method in class org.apache.spark.serializer.JavaSerializer
 
newInstance() - Method in class org.apache.spark.serializer.KryoSerializer
 
newInstance() - Method in class org.apache.spark.serializer.Serializer
Creates a new SerializerInstance.
newIntArrayEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newIntEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newIntSeqEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newKryo() - Method in class org.apache.spark.serializer.KryoSerializer
 
newKryoOutput() - Method in class org.apache.spark.serializer.KryoSerializer
 
newLongArrayEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newLongEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newLongSeqEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newMutableProjection(Seq<Expression>, Seq<Attribute>, boolean) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
newMutableProjection$default$3() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
newNaturalAscendingOrdering(Seq<DataType>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
newOrdering(Seq<SortOrder>, Seq<Attribute>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
newPredicate(Expression, Seq<Attribute>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
newProductArrayEncoder(TypeTags.TypeTag<A>) - Method in class org.apache.spark.sql.SQLImplicits
 
newProductEncoder(TypeTags.TypeTag<T>) - Method in class org.apache.spark.sql.SQLImplicits
 
newProductSeqEncoder(TypeTags.TypeTag<A>) - Method in class org.apache.spark.sql.SQLImplicits
 
newSession() - Method in class org.apache.spark.sql.hive.HiveContext
Deprecated.
Returns a new HiveContext as a new session, which has its own SQLConf, UDF/UDAF registry, temporary tables and SessionState, but shares the same CacheManager, IsolatedClientLoader and Hive client (for both execution and metadata) with the existing HiveContext.
newSession() - Method in class org.apache.spark.sql.SparkSession
Start a new session with isolated SQL configurations, temporary tables and registered functions, but sharing the underlying SparkContext and cached data.
newSession() - Method in class org.apache.spark.sql.SQLContext
Returns a SQLContext as a new session, with separate SQL configurations, temporary tables and registered functions, but sharing the same SparkContext, cached data and other shared state.
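A short sketch of session isolation, assuming an existing SparkSession named `spark` (the configuration key and view name are illustrative):

    val other = spark.newSession()
    other.conf.set("spark.sql.shuffle.partitions", "8")  // does not affect `spark`
    other.range(5).createOrReplaceTempView("nums")       // temp view visible only in `other`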
newShortArrayEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newShortEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newShortSeqEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newStringArrayEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newStringEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newStringSeqEncoder() - Method in class org.apache.spark.sql.SQLImplicits
 
newTemporaryConfiguration(boolean) - Static method in class org.apache.spark.sql.hive.HiveUtils
Constructs a configuration for hive, where the metastore is located in a temp directory.
next() - Method in class org.apache.spark.InterruptibleIterator
 
next() - Method in interface org.apache.spark.mllib.clustering.LDAOptimizer
 
next_day(Column, String) - Static method in class org.apache.spark.sql.functions
Given a date column, returns the first date which is later than the value of the date column that is on the specified day of the week.
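As a quick illustration (the DataFrame `df` and the "order_date" column are assumptions):

    import org.apache.spark.sql.functions.{col, next_day}

    // For each order date, the first Sunday strictly after it.
    val sundays = df.select(next_day(col("order_date"), "Sunday"))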
nextId() - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
nextId() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
nextId() - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
nextId() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
nextId() - Static method in class org.apache.spark.rdd.CheckpointState
 
nextId() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
nextId() - Static method in class org.apache.spark.scheduler.TaskLocality
 
nextId() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
nextId() - Static method in class org.apache.spark.TaskState
 
nextId_$eq(int) - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
nextId_$eq(int) - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
nextId_$eq(int) - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
nextId_$eq(int) - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
nextId_$eq(int) - Static method in class org.apache.spark.rdd.CheckpointState
 
nextId_$eq(int) - Static method in class org.apache.spark.scheduler.SchedulingMode
 
nextId_$eq(int) - Static method in class org.apache.spark.scheduler.TaskLocality
 
nextId_$eq(int) - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
nextId_$eq(int) - Static method in class org.apache.spark.TaskState
 
nextKeyValue() - Method in class org.apache.hadoop.hive.ql.io.orc.SparkOrcNewRecordReader
 
nextName() - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
nextName() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
nextName() - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
nextName() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
nextName() - Static method in class org.apache.spark.rdd.CheckpointState
 
nextName() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
nextName() - Static method in class org.apache.spark.scheduler.TaskLocality
 
nextName() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
nextName() - Static method in class org.apache.spark.TaskState
 
nextName_$eq(Iterator<String>) - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
nextName_$eq(Iterator<String>) - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
nextName_$eq(Iterator<String>) - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
nextName_$eq(Iterator<String>) - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
nextName_$eq(Iterator<String>) - Static method in class org.apache.spark.rdd.CheckpointState
 
nextName_$eq(Iterator<String>) - Static method in class org.apache.spark.scheduler.SchedulingMode
 
nextName_$eq(Iterator<String>) - Static method in class org.apache.spark.scheduler.TaskLocality
 
nextName_$eq(Iterator<String>) - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
nextName_$eq(Iterator<String>) - Static method in class org.apache.spark.TaskState
 
nextValue() - Method in class org.apache.spark.mllib.random.ExponentialGenerator
 
nextValue() - Method in class org.apache.spark.mllib.random.GammaGenerator
 
nextValue() - Method in class org.apache.spark.mllib.random.LogNormalGenerator
 
nextValue() - Method in class org.apache.spark.mllib.random.PoissonGenerator
 
nextValue() - Method in interface org.apache.spark.mllib.random.RandomDataGenerator
Returns an i.i.d. sample from the underlying distribution.
nextValue() - Method in class org.apache.spark.mllib.random.StandardNormalGenerator
 
nextValue() - Method in class org.apache.spark.mllib.random.UniformGenerator
 
nextValue() - Method in class org.apache.spark.mllib.random.WeibullGenerator
 
NGram - Class in org.apache.spark.ml.feature
:: Experimental :: A feature transformer that converts the input array of strings into an array of n-grams.
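For illustration, a minimal sketch of producing bigrams (the input DataFrame `tokenized` and its column names are assumptions):

    import org.apache.spark.ml.feature.NGram

    val ngram = new NGram()
      .setN(2)
      .setInputCol("tokens")
      .setOutputCol("bigrams")
    val bigrams = ngram.transform(tokenized)  // "tokens" must hold an array-of-strings column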
NGram(String) - Constructor for class org.apache.spark.ml.feature.NGram
 
NGram() - Constructor for class org.apache.spark.ml.feature.NGram
 
NNLS - Class in org.apache.spark.mllib.optimization
Object used to solve nonnegative least squares problems using a modified projected gradient method.
NNLS() - Constructor for class org.apache.spark.mllib.optimization.NNLS
 
NNLS.Workspace - Class in org.apache.spark.mllib.optimization
 
NNLS.Workspace(int) - Constructor for class org.apache.spark.mllib.optimization.NNLS.Workspace
 
NO_PREF() - Static method in class org.apache.spark.scheduler.TaskLocality
 
NO_RESOURCE - Static variable in class org.apache.spark.launcher.SparkLauncher
A special value for the resource that tells Spark to not try to process the app resource as a file.
Node - Class in org.apache.spark.ml.tree
:: DeveloperApi :: Decision tree node interface.
Node() - Constructor for class org.apache.spark.ml.tree.Node
 
Node - Class in org.apache.spark.mllib.tree.model
:: DeveloperApi :: Node in a decision tree.
Node(int, Predict, double, boolean, Option<Split>, Option<Node>, Option<Node>, Option<InformationGainStats>) - Constructor for class org.apache.spark.mllib.tree.model.Node
 
NODE_LOCAL() - Static method in class org.apache.spark.scheduler.TaskLocality
 
nodeData() - Method in class org.apache.spark.ml.tree.EnsembleModelReadWrite.EnsembleNodeData
 
nodeId() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
nodeName() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
noLocality() - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
Nominal() - Static method in class org.apache.spark.ml.attribute.AttributeType
Nominal type.
NominalAttribute - Class in org.apache.spark.ml.attribute
:: DeveloperApi :: A nominal attribute.
NONE - Static variable in class org.apache.spark.api.java.StorageLevels
 
None - Static variable in class org.apache.spark.graphx.TripletFields
None of the triplet fields are exposed.
NONE() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
NONE() - Static method in class org.apache.spark.storage.StorageLevel
 
nonEmpty() - Static method in class org.apache.spark.sql.types.StructType
 
nonLocalPaths(String, boolean) - Static method in class org.apache.spark.util.Utils
Return all non-local paths from a comma-separated list of paths.
nonnegative() - Static method in class org.apache.spark.ml.recommendation.ALS
 
nonNegativeHash(Object) - Static method in class org.apache.spark.util.Utils
 
nonNegativeMod(int, int) - Static method in class org.apache.spark.util.Utils
 
NoopDialect - Class in org.apache.spark.sql.jdbc
NOOP dialect object, always returning the neutral element.
NoopDialect() - Constructor for class org.apache.spark.sql.jdbc.NoopDialect
 
norm(Vector, double) - Static method in class org.apache.spark.ml.linalg.Vectors
Returns the p-norm of this vector.
norm(Vector, double) - Static method in class org.apache.spark.mllib.linalg.Vectors
Returns the p-norm of this vector.
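A small sketch using the ml.linalg variant (the mllib.linalg variant behaves the same way on its own Vector type):

    import org.apache.spark.ml.linalg.Vectors

    val v = Vectors.dense(3.0, -4.0)
    Vectors.norm(v, 1.0)  // 7.0  (L1 norm)
    Vectors.norm(v, 2.0)  // 5.0  (Euclidean norm)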
normalizeDuration(long) - Static method in class org.apache.spark.streaming.ui.UIUtils
Find the best TimeUnit for converting milliseconds to a friendly string.
Normalizer - Class in org.apache.spark.ml.feature
:: Experimental :: Normalize a vector to have unit norm using the given p-norm.
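A minimal sketch of the ml Normalizer (the DataFrame `df` and its column names are assumptions):

    import org.apache.spark.ml.feature.Normalizer

    val normalizer = new Normalizer()
      .setInputCol("features")
      .setOutputCol("normFeatures")
      .setP(1.0)                       // normalize each row to unit L1 norm; default is p = 2
    val normalized = normalizer.transform(df)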
Normalizer(String) - Constructor for class org.apache.spark.ml.feature.Normalizer
 
Normalizer() - Constructor for class org.apache.spark.ml.feature.Normalizer
 
Normalizer - Class in org.apache.spark.mllib.feature
Normalizes samples individually to unit L^p norm.
Normalizer(double) - Constructor for class org.apache.spark.mllib.feature.Normalizer
 
Normalizer() - Constructor for class org.apache.spark.mllib.feature.Normalizer
 
normalizeToProbabilitiesInPlace(DenseVector) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
Normalize a vector of raw predictions to be a multinomial probability vector, in place.
normalJavaRDD(JavaSparkContext, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
normalJavaRDD(JavaSparkContext, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
normalJavaRDD(JavaSparkContext, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
RandomRDDs.normalJavaRDD(org.apache.spark.api.java.JavaSparkContext, long, int, long) with the default number of partitions and the default seed.
normalJavaVectorRDD(JavaSparkContext, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
normalJavaVectorRDD(JavaSparkContext, long, int, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
normalJavaVectorRDD(JavaSparkContext, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
normalRDD(SparkContext, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD comprised of i.i.d. samples from the standard normal distribution.
normalVectorRDD(SparkContext, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD[Vector] with vectors containing i.i.d. samples drawn from the standard normal distribution.
normL1() - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
L1 norm of each dimension.
normL1() - Method in interface org.apache.spark.mllib.stat.MultivariateStatisticalSummary
L1 norm of each column
normL2() - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
L2 (Euclidean) norm of each dimension.
normL2() - Method in interface org.apache.spark.mllib.stat.MultivariateStatisticalSummary
Euclidean magnitude of each column
normPdf(double, double, double, double) - Static method in class org.apache.spark.mllib.stat.KernelDensity
Evaluates the PDF of a normal distribution.
not(Function0<Parsers.Parser<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
not(Column) - Static method in class org.apache.spark.sql.functions
Inversion of boolean expression, i.e. NOT.
Not - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff child is evaluated to false.
Not(Filter) - Constructor for class org.apache.spark.sql.sources.Not
 
notEqual(Object) - Method in class org.apache.spark.sql.Column
Inequality test.
ntile(int) - Static method in class org.apache.spark.sql.functions
Window function: returns the ntile group id (from 1 to n inclusive) in an ordered window partition.
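As a brief sketch of ntile over a window (the DataFrame `df` and the "dept"/"salary" columns are assumptions):

    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions.ntile

    // Assign each row to one of 4 quartiles within its department, ordered by salary.
    val byDept = Window.partitionBy("dept").orderBy("salary")
    val ranked = df.withColumn("quartile", ntile(4).over(byDept))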
nullable() - Method in class org.apache.spark.sql.catalog.Column
 
nullable() - Method in class org.apache.spark.sql.types.StructField
 
nullDeviance() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
The deviance for the null model.
nullHypothesis() - Method in class org.apache.spark.mllib.stat.test.ChiSqTestResult
 
nullHypothesis() - Method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTestResult
 
nullHypothesis() - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
nullHypothesis() - Method in interface org.apache.spark.mllib.stat.test.TestResult
Null hypothesis of the test.
nullHypothesis() - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
NullType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the NullType object.
NullType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing NULL values.
NUM_ATTRIBUTES() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
NUM_PARTITIONS() - Static method in class org.apache.spark.ui.UIWorkloadGenerator
 
NUM_VALUES() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
numAccums() - Static method in class org.apache.spark.util.AccumulatorContext
Returns the number of accumulators registered.
numActives() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
numActives() - Method in class org.apache.spark.ml.linalg.DenseVector
 
numActives() - Method in interface org.apache.spark.ml.linalg.Matrix
Find the number of values stored explicitly.
numActives() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
numActives() - Method in class org.apache.spark.ml.linalg.SparseVector
 
numActives() - Method in interface org.apache.spark.ml.linalg.Vector
Number of active entries.
numActives() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
numActives() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
numActives() - Method in interface org.apache.spark.mllib.linalg.Matrix
Find the number of values stored explicitly.
numActives() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
numActives() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
numActives() - Method in interface org.apache.spark.mllib.linalg.Vector
Number of active entries.
numActiveStages() - Method in class org.apache.spark.status.api.v1.JobData
 
numActiveStages() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
numActiveTasks() - Method in interface org.apache.spark.SparkStageInfo
 
numActiveTasks() - Method in class org.apache.spark.SparkStageInfoImpl
 
numActiveTasks() - Method in class org.apache.spark.status.api.v1.JobData
 
numActiveTasks() - Method in class org.apache.spark.status.api.v1.StageData
 
numActiveTasks() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
numActiveTasks() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
numAttributes() - Method in class org.apache.spark.ml.attribute.AttributeGroup
 
numberedTreeString() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
numBins() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
 
numBlocks() - Method in class org.apache.spark.storage.StorageStatus
Return the number of blocks stored in this block manager in O(RDDs) time.
numBuckets() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
numCachedPartitions() - Method in class org.apache.spark.status.api.v1.RDDStorageInfo
 
numCachedPartitions() - Method in class org.apache.spark.storage.RDDInfo
 
numCategories() - Method in class org.apache.spark.ml.tree.CategoricalSplit
 
numCategories() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.SplitData
 
numClasses() - Method in class org.apache.spark.ml.classification.ClassificationModel
Number of classes (values which the label can take).
numClasses() - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
numClasses() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
numClasses() - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
numClasses() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
numClasses() - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
numClasses() - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
numClasses() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
numColBlocks() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
 
numCols() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
numCols() - Method in interface org.apache.spark.ml.linalg.Matrix
Number of columns.
numCols() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
numCols() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
numCols() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
 
numCols() - Method in class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
Gets or computes the number of columns.
numCols() - Method in interface org.apache.spark.mllib.linalg.distributed.DistributedMatrix
Gets or computes the number of columns.
numCols() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
 
numCols() - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Gets or computes the number of columns.
numCols() - Method in interface org.apache.spark.mllib.linalg.Matrix
Number of columns.
numCols() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
numCompletedJobs() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
numCompletedStages() - Method in class org.apache.spark.status.api.v1.JobData
 
numCompletedStages() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
numCompletedTasks() - Method in interface org.apache.spark.SparkStageInfo
 
numCompletedTasks() - Method in class org.apache.spark.SparkStageInfoImpl
 
numCompletedTasks() - Method in class org.apache.spark.status.api.v1.JobData
 
numCompletedTasks() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
numCompleteTasks() - Method in class org.apache.spark.status.api.v1.StageData
 
numCompleteTasks() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
numEdges() - Method in class org.apache.spark.graphx.GraphOps
The number of edges in the graph.
Numeric() - Static method in class org.apache.spark.ml.attribute.AttributeType
Numeric type.
NumericAttribute - Class in org.apache.spark.ml.attribute
:: DeveloperApi :: A numeric attribute with optional summary statistics.
numericColumns() - Method in class org.apache.spark.sql.Dataset
 
NumericParser - Class in org.apache.spark.mllib.util
Simple parser for a numeric structure consisting of three types:
NumericParser() - Constructor for class org.apache.spark.mllib.util.NumericParser
 
numericRDDToDoubleRDDFunctions(RDD<T>, Numeric<T>) - Static method in class org.apache.spark.rdd.RDD
 
NumericType - Class in org.apache.spark.sql.types
:: DeveloperApi :: Numeric data types.
NumericType() - Constructor for class org.apache.spark.sql.types.NumericType
 
numFailedJobs() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
numFailedStages() - Method in class org.apache.spark.status.api.v1.JobData
 
numFailedStages() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
numFailedStages() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
numFailedTasks() - Method in interface org.apache.spark.SparkStageInfo
 
numFailedTasks() - Method in class org.apache.spark.SparkStageInfoImpl
 
numFailedTasks() - Method in class org.apache.spark.status.api.v1.JobData
 
numFailedTasks() - Method in class org.apache.spark.status.api.v1.StageData
 
numFailedTasks() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
numFailedTasks() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
numFeatures() - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
numFeatures() - Method in class org.apache.spark.ml.classification.GBTClassificationModel
 
numFeatures() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
numFeatures() - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
numFeatures() - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
numFeatures() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
numFeatures() - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
numFeatures() - Method in class org.apache.spark.ml.feature.HashingTF
Number of features.
numFeatures() - Method in class org.apache.spark.ml.feature.VectorIndexerModel
 
numFeatures() - Method in class org.apache.spark.ml.PredictionModel
Returns the number of features the model was trained on.
numFeatures() - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
numFeatures() - Method in class org.apache.spark.ml.regression.GBTRegressionModel
 
numFeatures() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
numFeatures() - Method in class org.apache.spark.ml.regression.LinearRegressionModel
 
numFeatures() - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
numFeatures() - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
numFeatures() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
numFeatures() - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
numFeatures() - Method in class org.apache.spark.mllib.feature.HashingTF
 
numFeatures() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
The dimension of training features.
numFeatures() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
numFeatures() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
numFeatures() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
numFeatures_$eq(int) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
numFeatures_$eq(int) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
numFeatures_$eq(int) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
numFeatures_$eq(int) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
numFeatures_$eq(int) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
numFolds() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
numFolds() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
numInstances() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Number of instances in DataFrame predictions
numItemBlocks() - Static method in class org.apache.spark.ml.recommendation.ALS
 
numIterations() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionTrainingSummary
 
numIterations() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
numNodes() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
numNodes() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
numNodes() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
Get number of nodes in tree, including leaf nodes.
numNonzeros() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
numNonzeros() - Method in class org.apache.spark.ml.linalg.DenseVector
 
numNonzeros() - Method in interface org.apache.spark.ml.linalg.Matrix
Find the number of non-zero active values.
numNonzeros() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
numNonzeros() - Method in class org.apache.spark.ml.linalg.SparseVector
 
numNonzeros() - Method in interface org.apache.spark.ml.linalg.Vector
Number of nonzero elements.
numNonzeros() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
numNonzeros() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
numNonzeros() - Method in interface org.apache.spark.mllib.linalg.Matrix
Find the number of non-zero active values.
numNonzeros() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
numNonzeros() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
numNonzeros() - Method in interface org.apache.spark.mllib.linalg.Vector
Number of nonzero elements.
numNonzeros() - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
Number of nonzero elements in each dimension.
numNonzeros() - Method in interface org.apache.spark.mllib.stat.MultivariateStatisticalSummary
Number of nonzero elements (including explicitly presented zero values) in each column.
numOfLinearPredictor() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
numOfLinearPredictor() - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
numOfLinearPredictor() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
In GeneralizedLinearModel, only single linear predictor is allowed for both weights and intercept.
numOfLinearPredictor() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
numOfLinearPredictor() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
numOfLinearPredictor() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
numOfLinearPredictor_$eq(int) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
numOfLinearPredictor_$eq(int) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
numOfLinearPredictor_$eq(int) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
numOfLinearPredictor_$eq(int) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
numOfLinearPredictor_$eq(int) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
numPartitions() - Method in class org.apache.spark.HashPartitioner
 
numPartitions() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
numPartitions() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
numPartitions() - Method in class org.apache.spark.Partitioner
 
numPartitions() - Method in class org.apache.spark.RangePartitioner
 
numPartitions() - Method in class org.apache.spark.rdd.PartitionGroup
 
numPartitions() - Method in class org.apache.spark.status.api.v1.RDDStorageInfo
 
numPartitions() - Method in class org.apache.spark.storage.RDDInfo
 
numPartitions(int) - Method in class org.apache.spark.streaming.StateSpec
Set the number of partitions by which the state RDDs generated by mapWithState will be partitioned.
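A minimal sketch of how numPartitions is combined with mapWithState, assuming a DStream[(String, Int)] named `pairs`:
    import org.apache.spark.streaming.{State, StateSpec}

    def updateCount(key: String, value: Option[Int], state: State[Int]): (String, Int) = {
      val sum = value.getOrElse(0) + state.getOption.getOrElse(0)
      state.update(sum)                      // keep a running total per key
      (key, sum)
    }

    // the state RDDs produced by mapWithState will use 8 partitions
    val spec = StateSpec.function(updateCount _).numPartitions(8)
    val runningCounts = pairs.mapWithState(spec)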
numRddBlocks() - Method in class org.apache.spark.storage.StorageStatus
Return the number of RDD blocks stored in this block manager in O(RDDs) time.
numRddBlocksById(int) - Method in class org.apache.spark.storage.StorageStatus
Return the number of blocks that belong to the given RDD in O(1) time.
numRecords() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
The number of records received by the receivers in this batch.
numRecords() - Method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
numRetries(SparkConf) - Static method in class org.apache.spark.util.RpcUtils
Returns the configured number of times to retry connecting
numRowBlocks() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
 
numRows() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
numRows() - Method in interface org.apache.spark.ml.linalg.Matrix
Number of rows.
numRows() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
numRows() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
numRows() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
 
numRows() - Method in class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
Gets or computes the number of rows.
numRows() - Method in interface org.apache.spark.mllib.linalg.distributed.DistributedMatrix
Gets or computes the number of rows.
numRows() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
 
numRows() - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Gets or computes the number of rows.
numRows() - Method in interface org.apache.spark.mllib.linalg.Matrix
Number of rows.
numRows() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
numRunningTasks() - Method in interface org.apache.spark.SparkExecutorInfo
 
numRunningTasks() - Method in class org.apache.spark.SparkExecutorInfoImpl
 
numSkippedStages() - Method in class org.apache.spark.status.api.v1.JobData
 
numSkippedStages() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
numSkippedTasks() - Method in class org.apache.spark.status.api.v1.JobData
 
numSkippedTasks() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
numSpilledStages() - Method in class org.apache.spark.SpillListener
 
numTasks() - Method in class org.apache.spark.scheduler.StageInfo
 
numTasks() - Method in interface org.apache.spark.SparkStageInfo
 
numTasks() - Method in class org.apache.spark.SparkStageInfoImpl
 
numTasks() - Method in class org.apache.spark.status.api.v1.JobData
 
numTasks() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
numTopFeatures() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
numTopFeatures() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
numTopFeatures() - Method in class org.apache.spark.mllib.feature.ChiSqSelector
 
numTrees() - Method in class org.apache.spark.ml.classification.GBTClassificationModel
Number of trees in the ensemble.
numTrees() - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
Deprecated.
Use getNumTrees instead. This method will be removed in 2.1.0
numTrees() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
numTrees() - Method in class org.apache.spark.ml.regression.GBTRegressionModel
Number of trees in the ensemble.
numTrees() - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
Deprecated.
Use getNumTrees instead. This method will be removed in 2.1.0
numTrees() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
numTrees() - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
numTrees() - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
numUserBlocks() - Static method in class org.apache.spark.ml.recommendation.ALS
 
numValues() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
numVertices() - Method in class org.apache.spark.graphx.GraphOps
The number of vertices in the graph.

O

objectFile(String, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Load an RDD saved as a SequenceFile containing serialized objects, with NullWritable keys and BytesWritable values that contain a serialized partition.
objectFile(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Load an RDD saved as a SequenceFile containing serialized objects, with NullWritable keys and BytesWritable values that contain a serialized partition.
objectFile(String, int, ClassTag<T>) - Method in class org.apache.spark.SparkContext
Load an RDD saved as a SequenceFile containing serialized objects, with NullWritable keys and BytesWritable values that contain a serialized partition.
objectiveHistory() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionTrainingSummary
 
objectiveHistory() - Method in interface org.apache.spark.ml.classification.LogisticRegressionTrainingSummary
Objective function (scaled loss + regularization) at each iteration.
objectiveHistory() - Method in class org.apache.spark.ml.regression.LinearRegressionTrainingSummary
 
of(T) - Static method in class org.apache.spark.api.java.Optional
 
of(RDD<Tuple2<Object, Object>>) - Static method in class org.apache.spark.mllib.evaluation.AreaUnderCurve
Returns the area under the given curve.
of(Iterable<Tuple2<Object, Object>>) - Static method in class org.apache.spark.mllib.evaluation.AreaUnderCurve
Returns the area under the given curve.
of(JavaRDD<Tuple2<T, T>>) - Static method in class org.apache.spark.mllib.evaluation.RankingMetrics
Creates a RankingMetrics instance (for Java users).
OFF_HEAP - Static variable in class org.apache.spark.api.java.StorageLevels
 
OFF_HEAP() - Static method in class org.apache.spark.storage.StorageLevel
 
offset() - Method in class org.apache.spark.sql.SinkStatus
 
offset() - Method in class org.apache.spark.sql.SourceStatus
 
offset() - Method in class org.apache.spark.streaming.kafka.KafkaCluster.LeaderOffset
 
offsetBytes(String, long, long) - Static method in class org.apache.spark.util.Utils
Return a string containing part of a file from byte 'start' to 'end'.
offsetBytes(Seq<File>, long, long) - Static method in class org.apache.spark.util.Utils
Return a string containing data across a set of files.
OffsetRange - Class in org.apache.spark.streaming.kafka
Represents a range of offsets from a single Kafka TopicAndPartition.
offsetRanges() - Method in interface org.apache.spark.streaming.kafka.HasOffsetRanges
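The usual pattern for reading the ranges is to cast the underlying Kafka RDD inside foreachRDD; a sketch, where the direct stream `stream` and the logging are illustrative:
    import org.apache.spark.streaming.kafka.{HasOffsetRanges, OffsetRange}

    // assumes a direct Kafka DStream named `stream`
    stream.foreachRDD { rdd =>
      val ranges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      ranges.foreach(r => println(s"${r.topic} ${r.partition}: ${r.fromOffset} -> ${r.untilOffset}"))
    }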
 
ofNullable(T) - Static method in class org.apache.spark.api.java.Optional
 
ofRows(SparkSession, LogicalPlan) - Static method in class org.apache.spark.sql.Dataset
 
oldLocalModel() - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
oldLocalModel() - Method in class org.apache.spark.ml.clustering.LDAModel
Underlying spark.mllib model.
oldLocalModel() - Method in class org.apache.spark.ml.clustering.LocalLDAModel
 
onApplicationEnd(SparkListenerApplicationEnd) - Method in class org.apache.spark.scheduler.SparkListener
 
onApplicationEnd(SparkListenerApplicationEnd) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onApplicationEnd(SparkListenerApplicationEnd) - Method in class org.apache.spark.SparkFirehoseListener
 
onApplicationEnd(SparkListenerApplicationEnd) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onApplicationStart(SparkListenerApplicationStart) - Method in class org.apache.spark.scheduler.SparkListener
 
onApplicationStart(SparkListenerApplicationStart) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onApplicationStart(SparkListenerApplicationStart) - Method in class org.apache.spark.SparkFirehoseListener
 
onApplicationStart(SparkListenerApplicationStart) - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
onApplicationStart(SparkListenerApplicationStart) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onBatchCompleted(StreamingListenerBatchCompleted) - Method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
onBatchCompleted(StreamingListenerBatchCompleted) - Method in interface org.apache.spark.streaming.scheduler.StreamingListener
Called when processing of a batch of jobs has completed.
onBatchStarted(StreamingListenerBatchStarted) - Static method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
onBatchStarted(StreamingListenerBatchStarted) - Method in interface org.apache.spark.streaming.scheduler.StreamingListener
Called when processing of a batch of jobs has started.
onBatchSubmitted(StreamingListenerBatchSubmitted) - Static method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
onBatchSubmitted(StreamingListenerBatchSubmitted) - Method in interface org.apache.spark.streaming.scheduler.StreamingListener
Called when a batch of jobs has been submitted for processing.
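A small sketch of a custom StreamingListener that hooks the batch-completed callback; the listener name and printed message are illustrative:
    import org.apache.spark.streaming.scheduler.{StreamingListener, StreamingListenerBatchCompleted}

    class BatchTimingListener extends StreamingListener {
      override def onBatchCompleted(batch: StreamingListenerBatchCompleted): Unit = {
        // processingDelay is an Option[Long] of milliseconds
        println(s"batch processed in ${batch.batchInfo.processingDelay.getOrElse(-1L)} ms")
      }
    }

    // ssc.addStreamingListener(new BatchTimingListener)   // assumes a StreamingContext `ssc`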
onBlockManagerAdded(SparkListenerBlockManagerAdded) - Method in class org.apache.spark.scheduler.SparkListener
 
onBlockManagerAdded(SparkListenerBlockManagerAdded) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onBlockManagerAdded(SparkListenerBlockManagerAdded) - Method in class org.apache.spark.SparkFirehoseListener
 
onBlockManagerAdded(SparkListenerBlockManagerAdded) - Method in class org.apache.spark.storage.StorageStatusListener
 
onBlockManagerAdded(SparkListenerBlockManagerAdded) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onBlockManagerRemoved(SparkListenerBlockManagerRemoved) - Method in class org.apache.spark.scheduler.SparkListener
 
onBlockManagerRemoved(SparkListenerBlockManagerRemoved) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onBlockManagerRemoved(SparkListenerBlockManagerRemoved) - Method in class org.apache.spark.SparkFirehoseListener
 
onBlockManagerRemoved(SparkListenerBlockManagerRemoved) - Method in class org.apache.spark.storage.StorageStatusListener
 
onBlockManagerRemoved(SparkListenerBlockManagerRemoved) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onBlockUpdated(SparkListenerBlockUpdated) - Method in class org.apache.spark.scheduler.SparkListener
 
onBlockUpdated(SparkListenerBlockUpdated) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onBlockUpdated(SparkListenerBlockUpdated) - Method in class org.apache.spark.SparkFirehoseListener
 
onBlockUpdated(SparkListenerBlockUpdated) - Method in class org.apache.spark.storage.StorageStatusListener
 
onBlockUpdated(SparkListenerBlockUpdated) - Method in class org.apache.spark.ui.storage.StorageListener
 
OnceParser(Function1<Reader<Object>, Parsers.ParseResult<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
onComplete(Function1<Try<T>, U>, ExecutionContext) - Method in class org.apache.spark.ComplexFutureAction
 
onComplete(Function1<Try<T>, U>, ExecutionContext) - Method in interface org.apache.spark.FutureAction
When this action is completed, either through an exception, or a value, applies the provided function.
onComplete(Function1<R, BoxedUnit>) - Method in class org.apache.spark.partial.PartialResult
Set a handler to be called when this PartialResult completes.
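A sketch of how the handler is typically attached, assuming an RDD named `rdd` (countApprox returns a PartialResult[BoundedDouble]):
    val partial = rdd.countApprox(timeout = 1000L)
    partial.onComplete { bound =>
      println(s"final count estimate: ${bound.mean} +/- ${bound.high - bound.low}")
    }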
onComplete(Function1<Try<T>, U>, ExecutionContext) - Method in class org.apache.spark.SimpleFutureAction
 
OneHotEncoder - Class in org.apache.spark.ml.feature
:: Experimental :: A one-hot encoder that maps a column of category indices to a column of binary vectors, with at most a single one-value per row that indicates the input category index.
OneHotEncoder(String) - Constructor for class org.apache.spark.ml.feature.OneHotEncoder
 
OneHotEncoder() - Constructor for class org.apache.spark.ml.feature.OneHotEncoder
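A minimal sketch of using OneHotEncoder as a transformer; the input DataFrame and column names are assumptions:
    import org.apache.spark.ml.feature.OneHotEncoder

    // assumes a DataFrame `indexed` with a numeric "categoryIndex" column
    val encoder = new OneHotEncoder()
      .setInputCol("categoryIndex")
      .setOutputCol("categoryVec")
    val encoded = encoder.transform(indexed)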
 
onEnvironmentUpdate(SparkListenerEnvironmentUpdate) - Method in class org.apache.spark.scheduler.SparkListener
 
onEnvironmentUpdate(SparkListenerEnvironmentUpdate) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onEnvironmentUpdate(SparkListenerEnvironmentUpdate) - Method in class org.apache.spark.SparkFirehoseListener
 
onEnvironmentUpdate(SparkListenerEnvironmentUpdate) - Method in class org.apache.spark.ui.env.EnvironmentListener
 
onEnvironmentUpdate(SparkListenerEnvironmentUpdate) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
ones(int, int) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
Generate a DenseMatrix consisting of ones.
ones(int, int) - Static method in class org.apache.spark.ml.linalg.Matrices
Generate a DenseMatrix consisting of ones.
ones(int, int) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
Generate a DenseMatrix consisting of ones.
ones(int, int) - Static method in class org.apache.spark.mllib.linalg.Matrices
Generate a DenseMatrix consisting of ones.
OneSampleTwoSided() - Method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest.NullHypothesis$
 
OneToOneDependency<T> - Class in org.apache.spark
:: DeveloperApi :: Represents a one-to-one dependency between partitions of the parent and child RDDs.
OneToOneDependency(RDD<T>) - Constructor for class org.apache.spark.OneToOneDependency
 
onEvent(SparkListenerEvent) - Method in class org.apache.spark.SparkFirehoseListener
 
OneVsRest - Class in org.apache.spark.ml.classification
:: Experimental ::
OneVsRest(String) - Constructor for class org.apache.spark.ml.classification.OneVsRest
 
OneVsRest() - Constructor for class org.apache.spark.ml.classification.OneVsRest
 
OneVsRestModel - Class in org.apache.spark.ml.classification
:: Experimental :: Model produced by OneVsRest.
onExecutorAdded(SparkListenerExecutorAdded) - Method in class org.apache.spark.scheduler.SparkListener
 
onExecutorAdded(SparkListenerExecutorAdded) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onExecutorAdded(SparkListenerExecutorAdded) - Method in class org.apache.spark.SparkFirehoseListener
 
onExecutorAdded(SparkListenerExecutorAdded) - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate) - Method in class org.apache.spark.scheduler.SparkListener
 
onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate) - Method in class org.apache.spark.SparkFirehoseListener
 
onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onExecutorRemoved(SparkListenerExecutorRemoved) - Method in class org.apache.spark.scheduler.SparkListener
 
onExecutorRemoved(SparkListenerExecutorRemoved) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onExecutorRemoved(SparkListenerExecutorRemoved) - Method in class org.apache.spark.SparkFirehoseListener
 
onExecutorRemoved(SparkListenerExecutorRemoved) - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
onFail(Function1<Exception, BoxedUnit>) - Method in class org.apache.spark.partial.PartialResult
Set a handler to be called if this PartialResult's job fails.
onFailure(String, QueryExecution, Exception) - Method in interface org.apache.spark.sql.util.QueryExecutionListener
A callback function that will be called when a query execution failed.
onJobEnd(SparkListenerJobEnd) - Method in class org.apache.spark.scheduler.SparkListener
 
onJobEnd(SparkListenerJobEnd) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onJobEnd(SparkListenerJobEnd) - Method in class org.apache.spark.SparkFirehoseListener
 
onJobEnd(SparkListenerJobEnd) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onJobStart(SparkListenerJobStart) - Method in class org.apache.spark.scheduler.SparkListener
 
onJobStart(SparkListenerJobStart) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onJobStart(SparkListenerJobStart) - Method in class org.apache.spark.SparkFirehoseListener
 
onJobStart(SparkListenerJobStart) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
OnlineLDAOptimizer - Class in org.apache.spark.mllib.clustering
:: DeveloperApi ::
OnlineLDAOptimizer() - Constructor for class org.apache.spark.mllib.clustering.OnlineLDAOptimizer
 
onOtherEvent(SparkListenerEvent) - Method in class org.apache.spark.scheduler.SparkListener
 
onOtherEvent(SparkListenerEvent) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onOtherEvent(SparkListenerEvent) - Method in class org.apache.spark.SparkFirehoseListener
 
onOutputOperationCompleted(StreamingListenerOutputOperationCompleted) - Static method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
onOutputOperationCompleted(StreamingListenerOutputOperationCompleted) - Method in interface org.apache.spark.streaming.scheduler.StreamingListener
Called when processing of a job of a batch has completed.
onOutputOperationStarted(StreamingListenerOutputOperationStarted) - Static method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
onOutputOperationStarted(StreamingListenerOutputOperationStarted) - Method in interface org.apache.spark.streaming.scheduler.StreamingListener
Called when processing of a job of a batch has started.
onQueryProgress(ContinuousQueryListener.QueryProgress) - Method in class org.apache.spark.sql.util.ContinuousQueryListener
Called when there is some status update (ingestion rate updated, etc.)
onQueryStarted(ContinuousQueryListener.QueryStarted) - Method in class org.apache.spark.sql.util.ContinuousQueryListener
Called when a query is started.
onQueryTerminated(ContinuousQueryListener.QueryTerminated) - Method in class org.apache.spark.sql.util.ContinuousQueryListener
Called when a query is stopped, with or without error.
onReceiverError(StreamingListenerReceiverError) - Static method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
onReceiverError(StreamingListenerReceiverError) - Method in interface org.apache.spark.streaming.scheduler.StreamingListener
Called when a receiver has reported an error
onReceiverStarted(StreamingListenerReceiverStarted) - Static method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
onReceiverStarted(StreamingListenerReceiverStarted) - Method in interface org.apache.spark.streaming.scheduler.StreamingListener
Called when a receiver has been started
onReceiverStopped(StreamingListenerReceiverStopped) - Static method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
onReceiverStopped(StreamingListenerReceiverStopped) - Method in interface org.apache.spark.streaming.scheduler.StreamingListener
Called when a receiver has been stopped
onStageCompleted(SparkListenerStageCompleted) - Method in class org.apache.spark.scheduler.SparkListener
 
onStageCompleted(SparkListenerStageCompleted) - Method in class org.apache.spark.scheduler.StatsReportListener
 
onStageCompleted(SparkListenerStageCompleted) - Method in class org.apache.spark.SparkFirehoseListener
 
onStageCompleted(SparkListenerStageCompleted) - Method in class org.apache.spark.SpillListener
 
onStageCompleted(SparkListenerStageCompleted) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onStageCompleted(SparkListenerStageCompleted) - Method in class org.apache.spark.ui.storage.StorageListener
 
onStageSubmitted(SparkListenerStageSubmitted) - Method in class org.apache.spark.scheduler.SparkListener
 
onStageSubmitted(SparkListenerStageSubmitted) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onStageSubmitted(SparkListenerStageSubmitted) - Method in class org.apache.spark.SparkFirehoseListener
 
onStageSubmitted(SparkListenerStageSubmitted) - Method in class org.apache.spark.ui.jobs.JobProgressListener
For FIFO scheduling, all stages are contained in the "default" pool, but the "default" pool here is meaningless.
onStageSubmitted(SparkListenerStageSubmitted) - Method in class org.apache.spark.ui.storage.StorageListener
 
OnStart - Class in org.apache.spark.rpc.netty
 
OnStart() - Constructor for class org.apache.spark.rpc.netty.OnStart
 
onStart() - Method in class org.apache.spark.streaming.receiver.Receiver
This method is called by the system when the receiver is started.
OnStop - Class in org.apache.spark.rpc.netty
 
OnStop() - Constructor for class org.apache.spark.rpc.netty.OnStop
 
onStop() - Method in class org.apache.spark.streaming.receiver.Receiver
This method is called by the system when the receiver is stopped.
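onStart and onStop are the two lifecycle hooks a custom Receiver overrides; a minimal, non-blocking sketch in which the pushed data is illustrative:
    import org.apache.spark.storage.StorageLevel
    import org.apache.spark.streaming.receiver.Receiver

    class TickReceiver extends Receiver[String](StorageLevel.MEMORY_ONLY) {
      override def onStart(): Unit = {
        // onStart must not block: push data from a background thread
        new Thread("tick-receiver") {
          override def run(): Unit = {
            while (!isStopped()) { store("tick"); Thread.sleep(1000) }
          }
        }.start()
      }
      override def onStop(): Unit = ()   // the loop above exits once isStopped() is true
    }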
onSuccess(String, QueryExecution, long) - Method in interface org.apache.spark.sql.util.QueryExecutionListener
A callback function that will be called when a query executed successfully.
onTaskCompletion(TaskContext) - Method in interface org.apache.spark.util.TaskCompletionListener
 
onTaskEnd(SparkListenerTaskEnd) - Method in class org.apache.spark.scheduler.SparkListener
 
onTaskEnd(SparkListenerTaskEnd) - Method in class org.apache.spark.scheduler.StatsReportListener
 
onTaskEnd(SparkListenerTaskEnd) - Method in class org.apache.spark.SparkFirehoseListener
 
onTaskEnd(SparkListenerTaskEnd) - Method in class org.apache.spark.SpillListener
 
onTaskEnd(SparkListenerTaskEnd) - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
onTaskEnd(SparkListenerTaskEnd) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onTaskFailure(TaskContext, Throwable) - Method in interface org.apache.spark.util.TaskFailureListener
 
onTaskGettingResult(SparkListenerTaskGettingResult) - Method in class org.apache.spark.scheduler.SparkListener
 
onTaskGettingResult(SparkListenerTaskGettingResult) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onTaskGettingResult(SparkListenerTaskGettingResult) - Method in class org.apache.spark.SparkFirehoseListener
 
onTaskGettingResult(SparkListenerTaskGettingResult) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onTaskStart(SparkListenerTaskStart) - Method in class org.apache.spark.scheduler.SparkListener
 
onTaskStart(SparkListenerTaskStart) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onTaskStart(SparkListenerTaskStart) - Method in class org.apache.spark.SparkFirehoseListener
 
onTaskStart(SparkListenerTaskStart) - Method in class org.apache.spark.ui.exec.ExecutorsListener
 
onTaskStart(SparkListenerTaskStart) - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
onUnpersistRDD(SparkListenerUnpersistRDD) - Method in class org.apache.spark.scheduler.SparkListener
 
onUnpersistRDD(SparkListenerUnpersistRDD) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
onUnpersistRDD(SparkListenerUnpersistRDD) - Method in class org.apache.spark.SparkFirehoseListener
 
onUnpersistRDD(SparkListenerUnpersistRDD) - Method in class org.apache.spark.storage.StorageStatusListener
 
onUnpersistRDD(SparkListenerUnpersistRDD) - Method in class org.apache.spark.ui.storage.StorageListener
 
OOM() - Static method in class org.apache.spark.util.SparkExitCode
The default uncaught exception handler was reached, and the uncaught exception was an OutOfMemoryError.
open() - Method in class org.apache.spark.input.PortableDataStream
Create a new DataInputStream from the split and context.
ops() - Method in class org.apache.spark.graphx.Graph
The associated GraphOps object.
ops() - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
opt(Function0<Parsers.Parser<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
optimize(RDD<Tuple2<Object, Vector>>, Vector) - Method in class org.apache.spark.mllib.optimization.GradientDescent
:: DeveloperApi :: Runs gradient descent on the given training data.
optimize(RDD<Tuple2<Object, Vector>>, Vector) - Method in class org.apache.spark.mllib.optimization.LBFGS
 
optimize(RDD<Tuple2<Object, Vector>>, Vector) - Method in interface org.apache.spark.mllib.optimization.Optimizer
Solve the provided convex optimization problem.
optimizeDocConcentration() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
optimizeDocConcentration() - Static method in class org.apache.spark.ml.clustering.LDA
 
optimizeDocConcentration() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
optimizer() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
optimizer() - Static method in class org.apache.spark.ml.clustering.LDA
 
optimizer() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
optimizer() - Method in class org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
 
optimizer() - Method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
optimizer() - Method in class org.apache.spark.mllib.classification.SVMWithSGD
 
Optimizer - Interface in org.apache.spark.mllib.optimization
:: DeveloperApi :: Trait for optimization problem solvers.
optimizer() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
The optimizer to solve the problem.
optimizer() - Method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
optimizer() - Method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
optimizer() - Method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
option(String, String) - Method in class org.apache.spark.sql.DataFrameReader
Adds an input option for the underlying data source.
option(String, boolean) - Method in class org.apache.spark.sql.DataFrameReader
Adds an input option for the underlying data source.
option(String, long) - Method in class org.apache.spark.sql.DataFrameReader
Adds an input option for the underlying data source.
option(String, double) - Method in class org.apache.spark.sql.DataFrameReader
Adds an input option for the underlying data source.
option(String, String) - Method in class org.apache.spark.sql.DataFrameWriter
Adds an output option for the underlying data source.
option(String, boolean) - Method in class org.apache.spark.sql.DataFrameWriter
Adds an output option for the underlying data source.
option(String, long) - Method in class org.apache.spark.sql.DataFrameWriter
Adds an output option for the underlying data source.
option(String, double) - Method in class org.apache.spark.sql.DataFrameWriter
Adds an output option for the underlying data source.
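The typed overloads all funnel into the same string key/value options map; a reader-side sketch in which the SparkSession name, format, and path are assumptions:
    // assumes a SparkSession named `spark`
    val df = spark.read
      .format("csv")
      .option("header", true)        // boolean overload
      .option("inferSchema", "true") // string overload of the same mechanism
      .load("/tmp/people.csv")       // illustrative path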
Optional<T> - Class in org.apache.spark.api.java
Like java.util.Optional in Java 8, scala.Option in Scala, and com.google.common.base.Optional in Google Guava, this class represents a value of a given type that may or may not exist.
options(Map<String, String>) - Method in class org.apache.spark.sql.DataFrameReader
(Scala-specific) Adds input options for the underlying data source.
options(Map<String, String>) - Method in class org.apache.spark.sql.DataFrameReader
Adds input options for the underlying data source.
options(Map<String, String>) - Method in class org.apache.spark.sql.DataFrameWriter
(Scala-specific) Adds output options for the underlying data source.
options(Map<String, String>) - Method in class org.apache.spark.sql.DataFrameWriter
Adds output options for the underlying data source.
optionToOptional(Option<T>) - Static method in class org.apache.spark.api.java.JavaUtils
 
or(T) - Method in class org.apache.spark.api.java.Optional
 
or(Column) - Method in class org.apache.spark.sql.Column
Boolean OR.
Or - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff at least one of left or right evaluates to true.
Or(Filter, Filter) - Constructor for class org.apache.spark.sql.sources.Or
 
OracleDialect - Class in org.apache.spark.sql.jdbc
 
OracleDialect() - Constructor for class org.apache.spark.sql.jdbc.OracleDialect
 
orc(String) - Method in class org.apache.spark.sql.DataFrameReader
Loads an ORC file and returns the result as a DataFrame.
orc(String) - Method in class org.apache.spark.sql.DataFrameWriter
Saves the content of the DataFrame in ORC format at the specified path.
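A round-trip sketch; in Spark 2.0 ORC support requires Hive support to be enabled, and the session, DataFrame, and path below are assumptions:
    // assumes a SparkSession `spark` built with .enableHiveSupport() and a DataFrame `df`
    df.write.orc("/tmp/events_orc")
    val reloaded = spark.read.orc("/tmp/events_orc")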
ORC_COMPRESSION() - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
OrcFileOperator - Class in org.apache.spark.sql.hive.orc
 
OrcFileOperator() - Constructor for class org.apache.spark.sql.hive.orc.OrcFileOperator
 
OrcFilters - Class in org.apache.spark.sql.hive.orc
Helper object for building ORC SearchArguments, which are used for ORC predicate push-down.
OrcFilters() - Constructor for class org.apache.spark.sql.hive.orc.OrcFilters
 
OrcRelation - Class in org.apache.spark.sql.hive.orc
 
OrcRelation() - Constructor for class org.apache.spark.sql.hive.orc.OrcRelation
 
orderBy(String, String...) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset sorted by the given expressions.
orderBy(Column...) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset sorted by the given expressions.
orderBy(String, Seq<String>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset sorted by the given expressions.
orderBy(Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset sorted by the given expressions.
orderBy(String, String...) - Static method in class org.apache.spark.sql.expressions.Window
Creates a WindowSpec with the ordering defined.
orderBy(Column...) - Static method in class org.apache.spark.sql.expressions.Window
Creates a WindowSpec with the ordering defined.
orderBy(String, Seq<String>) - Static method in class org.apache.spark.sql.expressions.Window
Creates a WindowSpec with the ordering defined.
orderBy(Seq<Column>) - Static method in class org.apache.spark.sql.expressions.Window
Creates a WindowSpec with the ordering defined.
orderBy(String, String...) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the ordering columns in a WindowSpec.
orderBy(Column...) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the ordering columns in a WindowSpec.
orderBy(String, Seq<String>) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the ordering columns in a WindowSpec.
orderBy(Seq<Column>) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the ordering columns in a WindowSpec.
OrderedRDDFunctions<K,V,P extends scala.Product2<K,V>> - Class in org.apache.spark.rdd
Extra functions available on RDDs of (key, value) pairs where the key is sortable through an implicit conversion.
OrderedRDDFunctions(RDD<P>, Ordering<K>, ClassTag<K>, ClassTag<V>, ClassTag<P>) - Constructor for class org.apache.spark.rdd.OrderedRDDFunctions
 
ordering() - Static method in class org.apache.spark.streaming.Time
 
ORDINAL() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
orElse(T) - Method in class org.apache.spark.api.java.Optional
 
orElse(PartialFunction<A1, B1>) - Static method in class org.apache.spark.sql.types.StructType
 
org.apache.hadoop.hive.ql.io.orc - package org.apache.hadoop.hive.ql.io.orc
 
org.apache.spark - package org.apache.spark
Core Spark classes in Scala.
org.apache.spark.api.java - package org.apache.spark.api.java
Spark Java programming APIs.
org.apache.spark.api.java.function - package org.apache.spark.api.java.function
Set of interfaces to represent functions in Spark's Java API.
org.apache.spark.api.r - package org.apache.spark.api.r
 
org.apache.spark.broadcast - package org.apache.spark.broadcast
Spark's broadcast variables, used to broadcast immutable datasets to all nodes.
org.apache.spark.graphx - package org.apache.spark.graphx
ALPHA COMPONENT: GraphX is a graph processing framework built on top of Spark.
org.apache.spark.graphx.impl - package org.apache.spark.graphx.impl
 
org.apache.spark.graphx.lib - package org.apache.spark.graphx.lib
Various analytics functions for graphs.
org.apache.spark.graphx.util - package org.apache.spark.graphx.util
Collections of utilities used by graphx.
org.apache.spark.input - package org.apache.spark.input
 
org.apache.spark.internal.config - package org.apache.spark.internal.config
 
org.apache.spark.io - package org.apache.spark.io
IO codecs used for compression.
org.apache.spark.launcher - package org.apache.spark.launcher
Library for launching Spark applications.
org.apache.spark.mapred - package org.apache.spark.mapred
 
org.apache.spark.ml - package org.apache.spark.ml
Spark ML is a component that adds a new set of machine learning APIs to let users quickly assemble and configure practical machine learning pipelines.
org.apache.spark.ml.ann - package org.apache.spark.ml.ann
 
org.apache.spark.ml.attribute - package org.apache.spark.ml.attribute
ML attributes
org.apache.spark.ml.classification - package org.apache.spark.ml.classification
 
org.apache.spark.ml.clustering - package org.apache.spark.ml.clustering
 
org.apache.spark.ml.evaluation - package org.apache.spark.ml.evaluation
 
org.apache.spark.ml.feature - package org.apache.spark.ml.feature
Feature transformers: the `ml.feature` package provides common feature transformers that help convert raw data or features into more suitable forms for model fitting.
org.apache.spark.ml.impl - package org.apache.spark.ml.impl
 
org.apache.spark.ml.linalg - package org.apache.spark.ml.linalg
 
org.apache.spark.ml.param - package org.apache.spark.ml.param
 
org.apache.spark.ml.param.shared - package org.apache.spark.ml.param.shared
 
org.apache.spark.ml.r - package org.apache.spark.ml.r
 
org.apache.spark.ml.recommendation - package org.apache.spark.ml.recommendation
 
org.apache.spark.ml.regression - package org.apache.spark.ml.regression
 
org.apache.spark.ml.source.libsvm - package org.apache.spark.ml.source.libsvm
 
org.apache.spark.ml.stat.distribution - package org.apache.spark.ml.stat.distribution
 
org.apache.spark.ml.tree - package org.apache.spark.ml.tree
 
org.apache.spark.ml.tree.impl - package org.apache.spark.ml.tree.impl
 
org.apache.spark.ml.tuning - package org.apache.spark.ml.tuning
 
org.apache.spark.ml.util - package org.apache.spark.ml.util
 
org.apache.spark.mllib.classification - package org.apache.spark.mllib.classification
 
org.apache.spark.mllib.classification.impl - package org.apache.spark.mllib.classification.impl
 
org.apache.spark.mllib.clustering - package org.apache.spark.mllib.clustering
 
org.apache.spark.mllib.evaluation - package org.apache.spark.mllib.evaluation
 
org.apache.spark.mllib.evaluation.binary - package org.apache.spark.mllib.evaluation.binary
 
org.apache.spark.mllib.feature - package org.apache.spark.mllib.feature
 
org.apache.spark.mllib.fpm - package org.apache.spark.mllib.fpm
 
org.apache.spark.mllib.linalg - package org.apache.spark.mllib.linalg
 
org.apache.spark.mllib.linalg.distributed - package org.apache.spark.mllib.linalg.distributed
 
org.apache.spark.mllib.optimization - package org.apache.spark.mllib.optimization
 
org.apache.spark.mllib.pmml - package org.apache.spark.mllib.pmml
 
org.apache.spark.mllib.pmml.export - package org.apache.spark.mllib.pmml.export
 
org.apache.spark.mllib.random - package org.apache.spark.mllib.random
 
org.apache.spark.mllib.rdd - package org.apache.spark.mllib.rdd
 
org.apache.spark.mllib.recommendation - package org.apache.spark.mllib.recommendation
 
org.apache.spark.mllib.regression - package org.apache.spark.mllib.regression
 
org.apache.spark.mllib.regression.impl - package org.apache.spark.mllib.regression.impl
 
org.apache.spark.mllib.stat - package org.apache.spark.mllib.stat
 
org.apache.spark.mllib.stat.correlation - package org.apache.spark.mllib.stat.correlation
 
org.apache.spark.mllib.stat.distribution - package org.apache.spark.mllib.stat.distribution
 
org.apache.spark.mllib.stat.test - package org.apache.spark.mllib.stat.test
 
org.apache.spark.mllib.tree - package org.apache.spark.mllib.tree
 
org.apache.spark.mllib.tree.configuration - package org.apache.spark.mllib.tree.configuration
 
org.apache.spark.mllib.tree.impurity - package org.apache.spark.mllib.tree.impurity
 
org.apache.spark.mllib.tree.loss - package org.apache.spark.mllib.tree.loss
 
org.apache.spark.mllib.tree.model - package org.apache.spark.mllib.tree.model
 
org.apache.spark.mllib.util - package org.apache.spark.mllib.util
 
org.apache.spark.partial - package org.apache.spark.partial
 
org.apache.spark.rdd - package org.apache.spark.rdd
Provides implementations of various RDDs.
org.apache.spark.rpc.netty - package org.apache.spark.rpc.netty
 
org.apache.spark.scheduler - package org.apache.spark.scheduler
Spark's DAG scheduler.
org.apache.spark.scheduler.cluster - package org.apache.spark.scheduler.cluster
 
org.apache.spark.scheduler.cluster.mesos - package org.apache.spark.scheduler.cluster.mesos
 
org.apache.spark.scheduler.local - package org.apache.spark.scheduler.local
 
org.apache.spark.security - package org.apache.spark.security
 
org.apache.spark.serializer - package org.apache.spark.serializer
Pluggable serializers for RDD and shuffle data.
org.apache.spark.sql - package org.apache.spark.sql
 
org.apache.spark.sql.api.java - package org.apache.spark.sql.api.java
Allows the execution of relational queries, including those expressed in SQL using Spark.
org.apache.spark.sql.api.r - package org.apache.spark.sql.api.r
 
org.apache.spark.sql.catalog - package org.apache.spark.sql.catalog
 
org.apache.spark.sql.expressions - package org.apache.spark.sql.expressions
 
org.apache.spark.sql.expressions.javalang - package org.apache.spark.sql.expressions.javalang
 
org.apache.spark.sql.expressions.scalalang - package org.apache.spark.sql.expressions.scalalang
 
org.apache.spark.sql.hive - package org.apache.spark.sql.hive
 
org.apache.spark.sql.hive.execution - package org.apache.spark.sql.hive.execution
 
org.apache.spark.sql.hive.orc - package org.apache.spark.sql.hive.orc
 
org.apache.spark.sql.internal - package org.apache.spark.sql.internal
All classes in this package are considered an internal API to Spark and are subject to change between minor releases.
org.apache.spark.sql.jdbc - package org.apache.spark.sql.jdbc
 
org.apache.spark.sql.sources - package org.apache.spark.sql.sources
 
org.apache.spark.sql.types - package org.apache.spark.sql.types
 
org.apache.spark.sql.util - package org.apache.spark.sql.util
 
org.apache.spark.status.api.v1 - package org.apache.spark.status.api.v1
 
org.apache.spark.storage - package org.apache.spark.storage
 
org.apache.spark.storage.memory - package org.apache.spark.storage.memory
 
org.apache.spark.streaming - package org.apache.spark.streaming
 
org.apache.spark.streaming.api.java - package org.apache.spark.streaming.api.java
Java APIs for Spark Streaming.
org.apache.spark.streaming.dstream - package org.apache.spark.streaming.dstream
Various implementations of DStreams.
org.apache.spark.streaming.flume - package org.apache.spark.streaming.flume
Spark Streaming receiver for Flume.
org.apache.spark.streaming.kafka - package org.apache.spark.streaming.kafka
Kafka receiver for Spark Streaming.
org.apache.spark.streaming.kinesis - package org.apache.spark.streaming.kinesis
 
org.apache.spark.streaming.receiver - package org.apache.spark.streaming.receiver
 
org.apache.spark.streaming.scheduler - package org.apache.spark.streaming.scheduler
 
org.apache.spark.streaming.ui - package org.apache.spark.streaming.ui
 
org.apache.spark.streaming.util - package org.apache.spark.streaming.util
 
org.apache.spark.ui - package org.apache.spark.ui
 
org.apache.spark.ui.env - package org.apache.spark.ui.env
 
org.apache.spark.ui.exec - package org.apache.spark.ui.exec
 
org.apache.spark.ui.jobs - package org.apache.spark.ui.jobs
 
org.apache.spark.ui.storage - package org.apache.spark.ui.storage
 
org.apache.spark.util - package org.apache.spark.util
Spark utilities.
org.apache.spark.util.io - package org.apache.spark.util.io
 
org.apache.spark.util.random - package org.apache.spark.util.random
Utilities for random number generation.
org.apache.spark.util.sketch - package org.apache.spark.util.sketch
 
origin() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
originalMax() - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
originalMin() - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
orNull() - Method in class org.apache.spark.api.java.Optional
 
other() - Method in class org.apache.spark.scheduler.RuntimePercentage
 
otherCopyArgs() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
otherVertexAttr(long) - Method in class org.apache.spark.graphx.EdgeTriplet
Given one vertex in the edge, return the other vertex.
otherVertexId(long) - Method in class org.apache.spark.graphx.Edge
Given one vertex in the edge, return the other vertex.
otherwise(Object) - Method in class org.apache.spark.sql.Column
Evaluates a list of conditions and returns one of multiple possible result expressions.
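otherwise closes a when(...) chain; a sketch assuming a DataFrame `people` with an "age" column:
    import org.apache.spark.sql.functions.{col, when}

    val labeled = people.withColumn(
      "group",
      when(col("age") >= 18, "adult").otherwise("minor"))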
Out() - Static method in class org.apache.spark.graphx.EdgeDirection
Edges originating from a vertex.
outDegrees() - Method in class org.apache.spark.graphx.GraphOps
The out-degree of each vertex in the graph.
outerJoinVertices(RDD<Tuple2<Object, U>>, Function3<Object, VD, Option<U>, VD2>, ClassTag<U>, ClassTag<VD2>, Predef.$eq$colon$eq<VD, VD2>) - Method in class org.apache.spark.graphx.Graph
Joins the vertices with entries in the table RDD and merges the results using mapFunc.
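A sketch of the join, assuming a Graph[Int, String] named `graph` and an RDD[(VertexId, Int)] named `extra`:
    val joined = graph.outerJoinVertices(extra) { (vid, attr, extraOpt) =>
      // vertices missing from `extra` see None and keep their old attribute value
      attr + extraOpt.getOrElse(0)
    }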
outerJoinVertices(RDD<Tuple2<Object, U>>, Function3<Object, VD, Option<U>, VD2>, ClassTag<U>, ClassTag<VD2>, Predef.$eq$colon$eq<VD, VD2>) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
outerJoinVertices$default$5(RDD<Tuple2<Object, U>>, Function3<Object, VD, Option<U>, VD2>) - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
output() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
OUTPUT() - Static method in class org.apache.spark.ui.ToolTips
 
OUTPUT_METRICS_PREFIX() - Static method in class org.apache.spark.InternalAccumulator
 
outputBytes() - Method in class org.apache.spark.status.api.v1.ExecutorStageSummary
 
outputBytes() - Method in class org.apache.spark.status.api.v1.StageData
 
outputBytes() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
outputBytes() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
outputCol() - Static method in class org.apache.spark.ml.feature.Binarizer
 
outputCol() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
outputCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
outputCol() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
outputCol() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
outputCol() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
outputCol() - Static method in class org.apache.spark.ml.feature.DCT
 
outputCol() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
outputCol() - Static method in class org.apache.spark.ml.feature.HashingTF
 
outputCol() - Static method in class org.apache.spark.ml.feature.IDF
 
outputCol() - Static method in class org.apache.spark.ml.feature.IDFModel
 
outputCol() - Static method in class org.apache.spark.ml.feature.IndexToString
 
outputCol() - Static method in class org.apache.spark.ml.feature.Interaction
 
outputCol() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
outputCol() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
outputCol() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
outputCol() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
outputCol() - Static method in class org.apache.spark.ml.feature.NGram
 
outputCol() - Static method in class org.apache.spark.ml.feature.Normalizer
 
outputCol() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
outputCol() - Static method in class org.apache.spark.ml.feature.PCA
 
outputCol() - Static method in class org.apache.spark.ml.feature.PCAModel
 
outputCol() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
outputCol() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
outputCol() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
outputCol() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
outputCol() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
outputCol() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
outputCol() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
outputCol() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
outputCol() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
outputCol() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
outputCol() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
outputCol() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
outputCol() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
outputCol() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
outputCol() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
OutputCommitCoordinationMessage - Interface in org.apache.spark.scheduler
 
outputCommitCoordinator() - Method in class org.apache.spark.SparkEnv
 
outputDataType() - Method in class org.apache.spark.ml.feature.DCT
 
outputDataType() - Method in class org.apache.spark.ml.feature.ElementwiseProduct
 
outputDataType() - Method in class org.apache.spark.ml.feature.NGram
 
outputDataType() - Method in class org.apache.spark.ml.feature.Normalizer
 
outputDataType() - Method in class org.apache.spark.ml.feature.PolynomialExpansion
 
outputDataType() - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
outputDataType() - Method in class org.apache.spark.ml.feature.Tokenizer
 
outputDataType() - Method in class org.apache.spark.ml.UnaryTransformer
Returns the data type of the output column.
outputEncoder() - Method in class org.apache.spark.sql.expressions.Aggregator
Specifies the Encoder for the final output value type.
outputFormat() - Method in class org.apache.spark.sql.internal.HiveSerDe
 
OutputMetricDistributions - Class in org.apache.spark.status.api.v1
 
OutputMetrics - Class in org.apache.spark.status.api.v1
 
outputMetrics() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
outputMetrics() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
OutputOperationInfo - Class in org.apache.spark.streaming.scheduler
:: DeveloperApi :: Class having information on output operations.
OutputOperationInfo(Time, int, String, String, Option<Object>, Option<Object>, Option<String>) - Constructor for class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
outputOperationInfo() - Method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationCompleted
 
outputOperationInfo() - Method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationStarted
 
outputOperationInfos() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
 
outputOrdering() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
outputPartitioning() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
outputRecords() - Method in class org.apache.spark.status.api.v1.StageData
 
outputRecords() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
outputRecords() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
outputSet() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
over(WindowSpec) - Method in class org.apache.spark.sql.Column
Define a windowing column.
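over attaches an aggregate or ranking function to a WindowSpec; a sketch assuming a DataFrame `sales` with "dept" and "amount" columns:
    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions.{col, rank}

    val byDept = Window.partitionBy("dept").orderBy(col("amount").desc)
    val ranked = sales.withColumn("rank", rank().over(byDept))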
overwrite() - Method in class org.apache.spark.ml.util.MLWriter
Overwrites if the output path already exists.
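Typical chaining, assuming a fitted model that implements MLWritable and an illustrative path:
    model.write.overwrite().save("/tmp/my-model")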
overwrite() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 

P

p() - Method in class org.apache.spark.ml.feature.Normalizer
Normalization in L^p^ space.
padTo(int, B, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
pageRank(double, double) - Method in class org.apache.spark.graphx.GraphOps
Run a dynamic version of PageRank returning a graph with vertex attributes containing the PageRank and edge attributes containing the normalized edge weight.
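A sketch, assuming a Graph named `graph`; the first argument is the convergence tolerance and the reset probability defaults to 0.15:
    val ranks = graph.pageRank(tol = 0.0001).vertices   // RDD of (VertexId, rank)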
PageRank - Class in org.apache.spark.graphx.lib
PageRank algorithm implementation.
PageRank() - Constructor for class org.apache.spark.graphx.lib.PageRank
 
PairDStreamFunctions<K,V> - Class in org.apache.spark.streaming.dstream
Extra functions available on DStream of (key, value) pairs through an implicit conversion.
PairDStreamFunctions(DStream<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>, Ordering<K>) - Constructor for class org.apache.spark.streaming.dstream.PairDStreamFunctions
 
PairFlatMapFunction<T,K,V> - Interface in org.apache.spark.api.java.function
A function that returns zero or more key-value pair records from each input record.
PairFunction<T,K,V> - Interface in org.apache.spark.api.java.function
A function that returns key-value pairs (Tuple2<K, V>), and can be used to construct PairRDDs.
PairRDDFunctions<K,V> - Class in org.apache.spark.rdd
Extra functions available on RDDs of (key, value) pairs through an implicit conversion.
PairRDDFunctions(RDD<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>, Ordering<K>) - Constructor for class org.apache.spark.rdd.PairRDDFunctions
 
PairwiseRRDD<T> - Class in org.apache.spark.api.r
Form an RDD[(Int, Array[Byte])] from key-value pairs returned from R.
PairwiseRRDD(RDD<T>, int, byte[], String, byte[], Object[], ClassTag<T>) - Constructor for class org.apache.spark.api.r.PairwiseRRDD
 
par() - Static method in class org.apache.spark.sql.types.StructType
 
parallelize(List<T>, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Distribute a local Scala collection to form an RDD.
parallelize(List<T>) - Method in class org.apache.spark.api.java.JavaSparkContext
Distribute a local Scala collection to form an RDD.
parallelize(Seq<T>, int, ClassTag<T>) - Method in class org.apache.spark.SparkContext
Distribute a local Scala collection to form an RDD.
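A sketch, assuming a SparkContext `sc`; numSlices controls the number of partitions:
    val rdd = sc.parallelize(Seq(1, 2, 3, 4), numSlices = 2)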
parallelizeDoubles(List<Double>, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Distribute a local Scala collection to form an RDD.
parallelizeDoubles(List<Double>) - Method in class org.apache.spark.api.java.JavaSparkContext
Distribute a local Scala collection to form an RDD.
parallelizePairs(List<Tuple2<K, V>>, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Distribute a local Scala collection to form an RDD.
parallelizePairs(List<Tuple2<K, V>>) - Method in class org.apache.spark.api.java.JavaSparkContext
Distribute a local Scala collection to form an RDD.
Param<T> - Class in org.apache.spark.ml.param
:: DeveloperApi :: A param with self-contained documentation and, optionally, a default value.
Param(String, String, String, Function1<T, Object>) - Constructor for class org.apache.spark.ml.param.Param
 
Param(Identifiable, String, String, Function1<T, Object>) - Constructor for class org.apache.spark.ml.param.Param
 
Param(String, String, String) - Constructor for class org.apache.spark.ml.param.Param
 
Param(Identifiable, String, String) - Constructor for class org.apache.spark.ml.param.Param
 
param() - Method in class org.apache.spark.ml.param.ParamPair
 
ParamGridBuilder - Class in org.apache.spark.ml.tuning
:: Experimental :: Builder for a param grid used in grid search-based model selection.
ParamGridBuilder() - Constructor for class org.apache.spark.ml.tuning.ParamGridBuilder
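A sketch of building a grid for grid-search model selection; the estimator and values are illustrative:
    import org.apache.spark.ml.classification.LogisticRegression
    import org.apache.spark.ml.tuning.ParamGridBuilder

    val lr = new LogisticRegression()
    val grid = new ParamGridBuilder()
      .addGrid(lr.regParam, Array(0.01, 0.1))
      .addGrid(lr.maxIter, Array(10, 50))
      .build()                      // Array[ParamMap], one entry per combination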
 
ParamMap - Class in org.apache.spark.ml.param
:: Experimental :: A param to value map.
ParamMap() - Constructor for class org.apache.spark.ml.param.ParamMap
Creates an empty param map.
paramMap() - Method in interface org.apache.spark.ml.param.Params
Internal param map for user-supplied values.
ParamPair<T> - Class in org.apache.spark.ml.param
:: Experimental :: A param and its value.
ParamPair(Param<T>, T) - Constructor for class org.apache.spark.ml.param.ParamPair
 
params() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
params() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
params() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
params() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
params() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
params() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
params() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
params() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
params() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
params() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
params() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
params() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
params() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
params() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
params() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
params() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
params() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
params() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
params() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
params() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
params() - Static method in class org.apache.spark.ml.clustering.KMeans
 
params() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
params() - Static method in class org.apache.spark.ml.clustering.LDA
 
params() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
params() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
params() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
params() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
params() - Static method in class org.apache.spark.ml.feature.Binarizer
 
params() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
params() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
params() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
params() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
params() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
params() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
params() - Static method in class org.apache.spark.ml.feature.DCT
 
params() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
params() - Static method in class org.apache.spark.ml.feature.HashingTF
 
params() - Static method in class org.apache.spark.ml.feature.IDF
 
params() - Static method in class org.apache.spark.ml.feature.IDFModel
 
params() - Static method in class org.apache.spark.ml.feature.IndexToString
 
params() - Static method in class org.apache.spark.ml.feature.Interaction
 
params() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
params() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
params() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
params() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
params() - Static method in class org.apache.spark.ml.feature.NGram
 
params() - Static method in class org.apache.spark.ml.feature.Normalizer
 
params() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
params() - Static method in class org.apache.spark.ml.feature.PCA
 
params() - Static method in class org.apache.spark.ml.feature.PCAModel
 
params() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
params() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
params() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
params() - Static method in class org.apache.spark.ml.feature.RFormula
 
params() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
params() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
params() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
params() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
params() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
params() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
params() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
params() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
params() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
params() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
params() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
params() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
params() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
params() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
params() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
Params - Interface in org.apache.spark.ml.param
:: DeveloperApi :: Trait for components that take parameters.
params() - Method in interface org.apache.spark.ml.param.Params
Returns all params sorted by their names.
params() - Static method in class org.apache.spark.ml.Pipeline
 
params() - Static method in class org.apache.spark.ml.PipelineModel
 
params() - Static method in class org.apache.spark.ml.recommendation.ALS
 
params() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
params() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
params() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
params() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
params() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
params() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
params() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
params() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
params() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
params() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
params() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
params() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
params() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
params() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
params() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
params() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
params() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
params() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
params() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
ParamValidators - Class in org.apache.spark.ml.param
:: DeveloperApi :: Factory methods for common validation functions for Param.isValid.
ParamValidators() - Constructor for class org.apache.spark.ml.param.ParamValidators
 
parCombiner() - Static method in class org.apache.spark.sql.types.StructType
 
parent(int, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
parent(int, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
parent(int, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
parent(int, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
parent(int, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
parent() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
parent() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
parent() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
parent() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
parent() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
parent() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
parent() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
parent() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
parent() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
parent() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
parent() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
parent() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
parent() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
parent() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
parent() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
parent() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
parent() - Static method in class org.apache.spark.ml.feature.IDFModel
 
parent() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
parent() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
parent() - Static method in class org.apache.spark.ml.feature.PCAModel
 
parent() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
parent() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
parent() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
parent() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
parent() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
parent() - Method in class org.apache.spark.ml.Model
The parent estimator that produced this model.
parent() - Static method in class org.apache.spark.ml.param.DoubleParam
 
parent() - Static method in class org.apache.spark.ml.param.FloatParam
 
parent() - Method in class org.apache.spark.ml.param.Param
 
parent() - Static method in class org.apache.spark.ml.PipelineModel
 
parent() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
parent() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
parent() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
parent() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
parent() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
parent() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
parent() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
parent() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
parent() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
parent() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
parent(int, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
parent(int, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
parent(int, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
parent(int, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
parent(int, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Returns the jth parent RDD: e.g. parent(0) is equivalent to firstParent.
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.PipelineModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
parent_$eq(Estimator<M>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
parentIds() - Method in class org.apache.spark.scheduler.StageInfo
 
parentIds() - Method in class org.apache.spark.storage.RDDInfo
 
parentIndex(int) - Static method in class org.apache.spark.mllib.tree.model.Node
Get the parent index of the given node, or 0 if it is the root.
parquet(String...) - Method in class org.apache.spark.sql.DataFrameReader
Loads a Parquet file, returning the result as a DataFrame.
parquet(Seq<String>) - Method in class org.apache.spark.sql.DataFrameReader
Loads a Parquet file, returning the result as a DataFrame.
parquet(String) - Method in class org.apache.spark.sql.DataFrameWriter
Saves the content of the DataFrame in Parquet format at the specified path.
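A minimal sketch of a Parquet round trip (assuming an existing SparkSession named spark and DataFrame named df; the path is hypothetical):

    // Write the DataFrame as Parquet, then load it back.
    df.write.parquet("/tmp/people.parquet")
    val people = spark.read.parquet("/tmp/people.parquet")
    people.printSchema()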
parse(String) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
parse(String) - Static method in class org.apache.spark.mllib.linalg.Vectors
Parses a string resulting from Vector.toString into a Vector.
parse(String) - Static method in class org.apache.spark.mllib.regression.LabeledPoint
Parses a string resulting from LabeledPoint#toString into a LabeledPoint.
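A small sketch of the toString/parse round trip for MLlib vectors and labeled points:

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.regression.LabeledPoint

    val v   = Vectors.dense(1.0, 0.0, 3.0)
    val v2  = Vectors.parse(v.toString)          // back to a Vector
    val lp  = LabeledPoint(1.0, Vectors.dense(1.0, 2.0))
    val lp2 = LabeledPoint.parse(lp.toString)    // back to a LabeledPoint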
parse(String) - Static method in class org.apache.spark.mllib.util.NumericParser
Parses a string into a Double, an Array[Double], or a Seq[Any].
parseAll(Parsers.Parser<T>, Reader<Object>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
parseAll(Parsers.Parser<T>, Reader) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
parseAll(Parsers.Parser<T>, CharSequence) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
parseDataType(String) - Method in class org.apache.spark.sql.SparkSession
Parses the data type in our internal string representation.
parseDataType(String) - Method in class org.apache.spark.sql.SQLContext
Parses the data type in our internal string representation.
parseHostPort(String) - Static method in class org.apache.spark.util.Utils
 
parseIgnoreCase(Class<E>, String) - Static method in class org.apache.spark.util.EnumUtil
 
parsePortMappingsSpec(String) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
Parse a comma-delimited list of port mapping specs, each of which takes the form host_port:container_port[:udp|:tcp].
Parser(Function1<Reader<Object>, Parsers.ParseResult<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
parseSql(String) - Method in class org.apache.spark.sql.SparkSession
 
parseSql(String) - Method in class org.apache.spark.sql.SQLContext
 
parseStandaloneMasterUrls(String) - Static method in class org.apache.spark.util.Utils
Split the comma delimited string of master URLs into a list.
parseVolumesSpec(String) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
Parse a comma-delimited list of volume specs, each of which takes the form [host-dir:]container-dir[:rw|:ro].
PartialResult<R> - Class in org.apache.spark.partial
 
PartialResult(R, boolean) - Constructor for class org.apache.spark.partial.PartialResult
 
Partition - Interface in org.apache.spark
An identifier for a partition in an RDD.
partition() - Method in class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
partition() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
partition(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
partition() - Method in class org.apache.spark.streaming.kafka.OffsetRange
 
partitionBy(Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a copy of the RDD partitioned using the specified partitioner.
partitionBy(PartitionStrategy) - Method in class org.apache.spark.graphx.Graph
Repartitions the edges in the graph according to partitionStrategy.
partitionBy(PartitionStrategy, int) - Method in class org.apache.spark.graphx.Graph
Repartitions the edges in the graph according to partitionStrategy.
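A rough sketch of repartitioning a graph's edges (assuming an existing Graph[VD, ED] named graph):

    import org.apache.spark.graphx.PartitionStrategy

    // Repartition edges with a 2D partitioning of the adjacency matrix.
    val g2 = graph.partitionBy(PartitionStrategy.EdgePartition2D)
    // Or request a specific number of edge partitions.
    val g8 = graph.partitionBy(PartitionStrategy.EdgePartition2D, 8)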
partitionBy(PartitionStrategy) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
partitionBy(PartitionStrategy, int) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
partitionBy(Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return a copy of the RDD partitioned using the specified partitioner.
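A minimal sketch of repartitioning a key-value RDD by key (assuming an existing SparkContext named sc; the data is made up):

    import org.apache.spark.HashPartitioner

    val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3)))
    // Hash-partition by key into 4 partitions; subsequent key-based
    // operations such as join or reduceByKey can then avoid a shuffle.
    val partitioned = pairs.partitionBy(new HashPartitioner(4))
    println(partitioned.partitioner)    // Some(HashPartitioner)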
partitionBy(String...) - Method in class org.apache.spark.sql.DataFrameWriter
Partitions the output by the given columns on the file system.
partitionBy(Seq<String>) - Method in class org.apache.spark.sql.DataFrameWriter
Partitions the output by the given columns on the file system.
partitionBy(String, String...) - Static method in class org.apache.spark.sql.expressions.Window
Creates a WindowSpec with the partitioning defined.
partitionBy(Column...) - Static method in class org.apache.spark.sql.expressions.Window
Creates a WindowSpec with the partitioning defined.
partitionBy(String, Seq<String>) - Static method in class org.apache.spark.sql.expressions.Window
Creates a WindowSpec with the partitioning defined.
partitionBy(Seq<Column>) - Static method in class org.apache.spark.sql.expressions.Window
Creates a WindowSpec with the partitioning defined.
partitionBy(String, String...) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the partitioning columns in a WindowSpec.
partitionBy(Column...) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the partitioning columns in a WindowSpec.
partitionBy(String, Seq<String>) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the partitioning columns in a WindowSpec.
partitionBy(Seq<Column>) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the partitioning columns in a WindowSpec.
PartitionCoalescer - Interface in org.apache.spark.rdd
:: DeveloperApi :: A PartitionCoalescer defines how to coalesce the partitions of a given RDD.
partitioner() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
partitioner() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
partitioner() - Static method in class org.apache.spark.api.java.JavaRDD
 
partitioner() - Method in interface org.apache.spark.api.java.JavaRDDLike
The partitioner of this RDD.
partitioner() - Static method in class org.apache.spark.api.r.RRDD
 
partitioner() - Static method in class org.apache.spark.graphx.EdgeRDD
 
partitioner() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
If partitionsRDD already has a partitioner, use it.
partitioner() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
partitioner() - Static method in class org.apache.spark.graphx.VertexRDD
 
Partitioner - Class in org.apache.spark
An object that defines how the elements in a key-value pair RDD are partitioned by key.
Partitioner() - Constructor for class org.apache.spark.Partitioner
 
partitioner() - Method in class org.apache.spark.rdd.CoGroupedRDD
 
partitioner() - Static method in class org.apache.spark.rdd.HadoopRDD
 
partitioner() - Static method in class org.apache.spark.rdd.JdbcRDD
 
partitioner() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
partitioner() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
partitioner() - Method in class org.apache.spark.rdd.RDD
Optionally overridden by subclasses to specify how they are partitioned.
partitioner() - Method in class org.apache.spark.rdd.ShuffledRDD
 
partitioner() - Method in class org.apache.spark.ShuffleDependency
 
partitioner(Partitioner) - Method in class org.apache.spark.streaming.StateSpec
Set the partitioner by which the state RDDs generated by mapWithState will be partitioned.
PartitionGroup - Class in org.apache.spark.rdd
:: DeveloperApi :: A group of Partitions; prefLoc is the preferred location for the partition group.
PartitionGroup(Option<String>) - Constructor for class org.apache.spark.rdd.PartitionGroup
 
partitionID() - Method in class org.apache.spark.TaskCommitDenied
 
partitionId() - Method in class org.apache.spark.TaskContext
The ID of the RDD partition that is computed by this task.
PartitionPruningRDD<T> - Class in org.apache.spark.rdd
:: DeveloperApi :: An RDD used to prune RDD partitions so we can avoid launching tasks on all partitions.
PartitionPruningRDD(RDD<T>, Function1<Object, Object>, ClassTag<T>) - Constructor for class org.apache.spark.rdd.PartitionPruningRDD
 
partitions() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
partitions() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
partitions() - Static method in class org.apache.spark.api.java.JavaRDD
 
partitions() - Method in interface org.apache.spark.api.java.JavaRDDLike
Set of partitions in this RDD.
partitions() - Static method in class org.apache.spark.api.r.RRDD
 
partitions() - Static method in class org.apache.spark.graphx.EdgeRDD
 
partitions() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
partitions() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
partitions() - Static method in class org.apache.spark.graphx.VertexRDD
 
partitions() - Static method in class org.apache.spark.rdd.HadoopRDD
 
partitions() - Static method in class org.apache.spark.rdd.JdbcRDD
 
partitions() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
partitions() - Method in class org.apache.spark.rdd.PartitionGroup
 
partitions() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
partitions() - Method in class org.apache.spark.rdd.RDD
Get the array of partitions of this RDD, taking into account whether the RDD is checkpointed or not.
partitions() - Method in class org.apache.spark.status.api.v1.RDDStorageInfo
 
partitionsRDD() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
partitionsRDD() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
PartitionStrategy - Interface in org.apache.spark.graphx
Represents the way edges are assigned to edge partitions based on their source and destination vertex IDs.
PartitionStrategy.CanonicalRandomVertexCut$ - Class in org.apache.spark.graphx
Assigns edges to partitions by hashing the source and destination vertex IDs in a canonical direction, resulting in a random vertex cut that colocates all edges between two vertices, regardless of direction.
PartitionStrategy.CanonicalRandomVertexCut$() - Constructor for class org.apache.spark.graphx.PartitionStrategy.CanonicalRandomVertexCut$
 
PartitionStrategy.EdgePartition1D$ - Class in org.apache.spark.graphx
Assigns edges to partitions using only the source vertex ID, colocating edges with the same source.
PartitionStrategy.EdgePartition1D$() - Constructor for class org.apache.spark.graphx.PartitionStrategy.EdgePartition1D$
 
PartitionStrategy.EdgePartition2D$ - Class in org.apache.spark.graphx
Assigns edges to partitions using a 2D partitioning of the sparse edge adjacency matrix, guaranteeing a 2 * sqrt(numParts) bound on vertex replication.
PartitionStrategy.EdgePartition2D$() - Constructor for class org.apache.spark.graphx.PartitionStrategy.EdgePartition2D$
 
PartitionStrategy.RandomVertexCut$ - Class in org.apache.spark.graphx
Assigns edges to partitions by hashing the source and destination vertex IDs, resulting in a random vertex cut that colocates all same-direction edges between two vertices.
PartitionStrategy.RandomVertexCut$() - Constructor for class org.apache.spark.graphx.PartitionStrategy.RandomVertexCut$
 
partsWithLocs() - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer.PartitionLocations
 
partsWithoutLocs() - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer.PartitionLocations
 
patch(int, GenSeq<B>, int, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
path() - Method in class org.apache.spark.scheduler.InputFormatInfo
 
path() - Method in class org.apache.spark.scheduler.SplitInfo
 
pattern() - Method in class org.apache.spark.ml.feature.RegexTokenizer
Regex pattern used to match delimiters if gaps is true or tokens if gaps is false.
pc() - Method in class org.apache.spark.ml.feature.PCAModel
 
pc() - Method in class org.apache.spark.mllib.feature.PCAModel
 
PCA - Class in org.apache.spark.ml.feature
:: Experimental :: PCA trains a model to project vectors to a low-dimensional space using PCA.
PCA(String) - Constructor for class org.apache.spark.ml.feature.PCA
 
PCA() - Constructor for class org.apache.spark.ml.feature.PCA
 
PCA - Class in org.apache.spark.mllib.feature
A feature transformer that projects vectors to a low-dimensional space using PCA.
PCA(int) - Constructor for class org.apache.spark.mllib.feature.PCA
 
PCAModel - Class in org.apache.spark.ml.feature
:: Experimental :: Model fitted by PCA.
PCAModel - Class in org.apache.spark.mllib.feature
Model fitted by PCA that can project vectors to a low-dimensional space using PCA.
pdf(Vector) - Method in class org.apache.spark.ml.stat.distribution.MultivariateGaussian
Returns the density of this multivariate Gaussian at the given point x.
pdf(Vector) - Method in class org.apache.spark.mllib.stat.distribution.MultivariateGaussian
Returns the density of this multivariate Gaussian at the given point x.
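A small sketch of evaluating the density with the mllib API (the mean and covariance values are made up):

    import org.apache.spark.mllib.linalg.{Matrices, Vectors}
    import org.apache.spark.mllib.stat.distribution.MultivariateGaussian

    val mu    = Vectors.dense(0.0, 0.0)
    val sigma = Matrices.dense(2, 2, Array(1.0, 0.0, 0.0, 1.0))  // identity covariance
    val gaussian = new MultivariateGaussian(mu, sigma)
    val density  = gaussian.pdf(Vectors.dense(0.5, -0.5))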
PEAK_EXECUTION_MEMORY() - Static method in class org.apache.spark.InternalAccumulator
 
PEAK_EXECUTION_MEMORY() - Static method in class org.apache.spark.ui.jobs.TaskDetailsClassNames
 
PEAK_EXECUTION_MEMORY() - Static method in class org.apache.spark.ui.ToolTips
 
PEARSON() - Static method in class org.apache.spark.mllib.stat.test.ChiSqTest
 
PearsonCorrelation - Class in org.apache.spark.mllib.stat.correlation
Compute Pearson correlation for two RDDs of the type RDD[Double] or the correlation matrix for an RDD of the type RDD[Vector].
PearsonCorrelation() - Constructor for class org.apache.spark.mllib.stat.correlation.PearsonCorrelation
 
pendingStages() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
percent_rank() - Static method in class org.apache.spark.sql.functions
Window function: returns the relative rank (i.e. percentile) of rows within a window partition.
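A brief sketch combining Window.partitionBy with percent_rank (the DataFrame df and its "dept" and "salary" columns are assumptions):

    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions.percent_rank

    val w = Window.partitionBy("dept").orderBy("salary")
    // Relative rank of each row within its department.
    val ranked = df.withColumn("pct_rank", percent_rank().over(w))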
percentiles() - Static method in class org.apache.spark.scheduler.StatsReportListener
 
percentilesHeader() - Static method in class org.apache.spark.scheduler.StatsReportListener
 
permutations() - Static method in class org.apache.spark.sql.types.StructType
 
persist(StorageLevel) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Set this RDD's storage level to persist its values across operations after the first time it is computed.
persist(StorageLevel) - Method in class org.apache.spark.api.java.JavaPairRDD
Set this RDD's storage level to persist its values across operations after the first time it is computed.
persist(StorageLevel) - Method in class org.apache.spark.api.java.JavaRDD
Set this RDD's storage level to persist its values across operations after the first time it is computed.
persist(StorageLevel) - Static method in class org.apache.spark.api.r.RRDD
 
persist() - Static method in class org.apache.spark.api.r.RRDD
 
persist(StorageLevel) - Static method in class org.apache.spark.graphx.EdgeRDD
 
persist() - Static method in class org.apache.spark.graphx.EdgeRDD
 
persist(StorageLevel) - Method in class org.apache.spark.graphx.Graph
Caches the vertices and edges associated with this graph at the specified storage level, ignoring any target storage levels previously set.
persist(StorageLevel) - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
Persists the edge partitions at the specified storage level, ignoring any existing target storage level.
persist(StorageLevel) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
persist(StorageLevel) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
Persists the vertex partitions at the specified storage level, ignoring any existing target storage level.
persist(StorageLevel) - Static method in class org.apache.spark.graphx.VertexRDD
 
persist() - Static method in class org.apache.spark.graphx.VertexRDD
 
persist(StorageLevel) - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Persists the underlying RDD with the specified storage level.
persist(StorageLevel) - Method in class org.apache.spark.rdd.HadoopRDD
 
persist(StorageLevel) - Static method in class org.apache.spark.rdd.JdbcRDD
 
persist() - Static method in class org.apache.spark.rdd.JdbcRDD
 
persist(StorageLevel) - Method in class org.apache.spark.rdd.NewHadoopRDD
 
persist(StorageLevel) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
persist() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
persist(StorageLevel) - Method in class org.apache.spark.rdd.RDD
Set this RDD's storage level to persist its values across operations after the first time it is computed.
persist() - Method in class org.apache.spark.rdd.RDD
Persist this RDD with the default storage level (`MEMORY_ONLY`).
persist() - Method in class org.apache.spark.sql.Dataset
Persist this Dataset with the default storage level (MEMORY_AND_DISK).
persist(StorageLevel) - Method in class org.apache.spark.sql.Dataset
Persist this Dataset with the given storage level.
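A short sketch of persisting with an explicit storage level (assuming an existing RDD named rdd and Dataset named ds):

    import org.apache.spark.storage.StorageLevel

    rdd.persist(StorageLevel.MEMORY_ONLY_SER)   // keep serialized copies in memory
    ds.persist(StorageLevel.MEMORY_AND_DISK)    // spill to disk when memory is short
    // ... run several actions that reuse the cached data ...
    rdd.unpersist()
    ds.unpersist()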
persist() - Method in class org.apache.spark.streaming.api.java.JavaDStream
Persist the RDDs of this DStream with the default storage level (MEMORY_ONLY_SER).
persist(StorageLevel) - Method in class org.apache.spark.streaming.api.java.JavaDStream
Persist the RDDs of this DStream with the given storage level.
persist() - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
persist(StorageLevel) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
persist() - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Persist the RDDs of this DStream with the default storage level (MEMORY_ONLY_SER).
persist(StorageLevel) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Persist the RDDs of this DStream with the given storage level.
persist() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
persist(StorageLevel) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
persist() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
persist(StorageLevel) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
persist() - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
persist(StorageLevel) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
persist(StorageLevel) - Method in class org.apache.spark.streaming.dstream.DStream
Persist the RDDs of this DStream with the given storage level.
persist() - Method in class org.apache.spark.streaming.dstream.DStream
Persist the RDDs of this DStream with the default storage level (MEMORY_ONLY_SER).
persist$default$1() - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
personalizedPageRank(long, double, double) - Method in class org.apache.spark.graphx.GraphOps
Run personalized PageRank for a given vertex, such that all random walks are started relative to the source node.
phrase(Parsers.Parser<T>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
pi() - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
pi() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
 
pi() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$.Data
 
pi() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$.Data
 
pickBin(Partition, RDD<?>, double, DefaultPartitionCoalescer.PartitionLocations) - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
Takes a parent RDD partition and decides which partition group to put it in. Takes locality into account, but also uses power-of-two choices to load balance. It strikes a balance between the two using the balanceSlack variable.
pickRandomVertex() - Method in class org.apache.spark.graphx.GraphOps
Picks a random vertex from the graph and returns its ID.
pipe(String) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
pipe(List<String>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
pipe(List<String>, Map<String, String>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
pipe(List<String>, Map<String, String>, boolean, int) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
pipe(String) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
pipe(List<String>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
pipe(List<String>, Map<String, String>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
pipe(List<String>, Map<String, String>, boolean, int) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
pipe(String) - Static method in class org.apache.spark.api.java.JavaRDD
 
pipe(List<String>) - Static method in class org.apache.spark.api.java.JavaRDD
 
pipe(List<String>, Map<String, String>) - Static method in class org.apache.spark.api.java.JavaRDD
 
pipe(List<String>, Map<String, String>, boolean, int) - Static method in class org.apache.spark.api.java.JavaRDD
 
pipe(String) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an RDD created by piping elements to a forked external process.
pipe(List<String>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an RDD created by piping elements to a forked external process.
pipe(List<String>, Map<String, String>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an RDD created by piping elements to a forked external process.
pipe(List<String>, Map<String, String>, boolean, int) - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an RDD created by piping elements to a forked external process.
pipe(String) - Static method in class org.apache.spark.api.r.RRDD
 
pipe(String, Map<String, String>) - Static method in class org.apache.spark.api.r.RRDD
 
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Static method in class org.apache.spark.api.r.RRDD
 
pipe(String) - Static method in class org.apache.spark.graphx.EdgeRDD
 
pipe(String, Map<String, String>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Static method in class org.apache.spark.graphx.EdgeRDD
 
pipe(String) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
pipe(String, Map<String, String>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
pipe(String) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
pipe(String, Map<String, String>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
pipe(String) - Static method in class org.apache.spark.graphx.VertexRDD
 
pipe(String, Map<String, String>) - Static method in class org.apache.spark.graphx.VertexRDD
 
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Static method in class org.apache.spark.graphx.VertexRDD
 
pipe(String) - Static method in class org.apache.spark.rdd.HadoopRDD
 
pipe(String, Map<String, String>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Static method in class org.apache.spark.rdd.HadoopRDD
 
pipe(String) - Static method in class org.apache.spark.rdd.JdbcRDD
 
pipe(String, Map<String, String>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Static method in class org.apache.spark.rdd.JdbcRDD
 
pipe(String) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
pipe(String, Map<String, String>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
pipe(String) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
pipe(String, Map<String, String>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
pipe(String) - Method in class org.apache.spark.rdd.RDD
Return an RDD created by piping elements to a forked external process.
pipe(String, Map<String, String>) - Method in class org.apache.spark.rdd.RDD
Return an RDD created by piping elements to a forked external process.
pipe(Seq<String>, Map<String, String>, Function1<Function1<String, BoxedUnit>, BoxedUnit>, Function2<T, Function1<String, BoxedUnit>, BoxedUnit>, boolean, int) - Method in class org.apache.spark.rdd.RDD
Return an RDD created by piping elements to a forked external process.
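A minimal sketch of piping partitions through an external command (assuming an existing RDD[String] named lines; the commands must exist on every executor):

    // Each element is written to the process's stdin;
    // each line of its stdout becomes an output element.
    val reversed = lines.pipe("rev")
    // Environment variables can be passed to the forked process.
    val copied = lines.pipe(Seq("cat"), Map("MY_ENV" -> "value"))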
pipe$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
pipe$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
pipe$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
pipe$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
pipe$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
pipe$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
pipe$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
pipe$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
pipe$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
pipe$default$3() - Static method in class org.apache.spark.api.r.RRDD
 
pipe$default$3() - Static method in class org.apache.spark.graphx.EdgeRDD
 
pipe$default$3() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
pipe$default$3() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
pipe$default$3() - Static method in class org.apache.spark.graphx.VertexRDD
 
pipe$default$3() - Static method in class org.apache.spark.rdd.HadoopRDD
 
pipe$default$3() - Static method in class org.apache.spark.rdd.JdbcRDD
 
pipe$default$3() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
pipe$default$3() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
pipe$default$4() - Static method in class org.apache.spark.api.r.RRDD
 
pipe$default$4() - Static method in class org.apache.spark.graphx.EdgeRDD
 
pipe$default$4() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
pipe$default$4() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
pipe$default$4() - Static method in class org.apache.spark.graphx.VertexRDD
 
pipe$default$4() - Static method in class org.apache.spark.rdd.HadoopRDD
 
pipe$default$4() - Static method in class org.apache.spark.rdd.JdbcRDD
 
pipe$default$4() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
pipe$default$4() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
pipe$default$5() - Static method in class org.apache.spark.api.r.RRDD
 
pipe$default$5() - Static method in class org.apache.spark.graphx.EdgeRDD
 
pipe$default$5() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
pipe$default$5() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
pipe$default$5() - Static method in class org.apache.spark.graphx.VertexRDD
 
pipe$default$5() - Static method in class org.apache.spark.rdd.HadoopRDD
 
pipe$default$5() - Static method in class org.apache.spark.rdd.JdbcRDD
 
pipe$default$5() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
pipe$default$5() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
pipe$default$6() - Static method in class org.apache.spark.api.r.RRDD
 
pipe$default$6() - Static method in class org.apache.spark.graphx.EdgeRDD
 
pipe$default$6() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
pipe$default$6() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
pipe$default$6() - Static method in class org.apache.spark.graphx.VertexRDD
 
pipe$default$6() - Static method in class org.apache.spark.rdd.HadoopRDD
 
pipe$default$6() - Static method in class org.apache.spark.rdd.JdbcRDD
 
pipe$default$6() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
pipe$default$6() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
Pipeline - Class in org.apache.spark.ml
:: Experimental :: A simple pipeline, which acts as an estimator.
Pipeline(String) - Constructor for class org.apache.spark.ml.Pipeline
 
Pipeline() - Constructor for class org.apache.spark.ml.Pipeline
 
Pipeline.SharedReadWrite$ - Class in org.apache.spark.ml
Methods for MLReader and MLWriter shared between Pipeline and PipelineModel.
Pipeline.SharedReadWrite$() - Constructor for class org.apache.spark.ml.Pipeline.SharedReadWrite$
 
PipelineModel - Class in org.apache.spark.ml
:: Experimental :: Represents a fitted pipeline.
PipelineStage - Class in org.apache.spark.ml
:: DeveloperApi :: A stage in a pipeline, either an Estimator or a Transformer.
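A rough sketch of assembling and fitting a pipeline (the training DataFrame and its "text"/"label" columns are assumptions):

    import org.apache.spark.ml.Pipeline
    import org.apache.spark.ml.classification.LogisticRegression
    import org.apache.spark.ml.feature.{HashingTF, Tokenizer}

    val tokenizer = new Tokenizer().setInputCol("text").setOutputCol("words")
    val hashingTF = new HashingTF().setInputCol("words").setOutputCol("features")
    val lr = new LogisticRegression().setMaxIter(10)
    // A Pipeline is itself an Estimator; fit() returns a PipelineModel (a Transformer).
    val model = new Pipeline().setStages(Array(tokenizer, hashingTF, lr)).fit(training)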
PipelineStage() - Constructor for class org.apache.spark.ml.PipelineStage
 
pivot(String) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Pivots a column of the current DataFrame and performs the specified aggregation.
pivot(String, Seq<Object>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Pivots a column of the current DataFrame and performs the specified aggregation.
pivot(String, List<Object>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Pivots a column of the current DataFrame and performs the specified aggregation.
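A small sketch of pivoting (the DataFrame df and its "year", "course", and "earnings" columns are assumptions):

    // Turn the distinct "course" values into columns, summing earnings per (year, course).
    val pivoted = df.groupBy("year").pivot("course", Seq("dotNET", "Java")).sum("earnings")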
plan() - Method in exception org.apache.spark.sql.AnalysisException
 
plus(Object) - Method in class org.apache.spark.sql.Column
Sum of this expression and another expression.
plus(Duration) - Method in class org.apache.spark.streaming.Duration
 
plus(Duration) - Method in class org.apache.spark.streaming.Time
 
PMMLExportable - Interface in org.apache.spark.mllib.pmml
:: DeveloperApi :: Export model to the PMML format. Predictive Model Markup Language (PMML) is an XML-based file format developed by the Data Mining Group (www.dmg.org).
PMMLModelExportFactory - Class in org.apache.spark.mllib.pmml.export
 
PMMLModelExportFactory() - Constructor for class org.apache.spark.mllib.pmml.export.PMMLModelExportFactory
 
pmod(Column, Column) - Static method in class org.apache.spark.sql.functions
Returns the positive value of dividend mod divisor.
point() - Method in class org.apache.spark.mllib.feature.VocabWord
 
POINTS() - Static method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
PoissonBounds - Class in org.apache.spark.util.random
Utility functions that help us determine bounds on adjusted sampling rate to guarantee exact sample sizes with high confidence when sampling with replacement.
PoissonBounds() - Constructor for class org.apache.spark.util.random.PoissonBounds
 
PoissonGenerator - Class in org.apache.spark.mllib.random
:: DeveloperApi :: Generates i.i.d. samples from the Poisson distribution with the given mean.
PoissonGenerator(double) - Constructor for class org.apache.spark.mllib.random.PoissonGenerator
 
poissonJavaRDD(JavaSparkContext, double, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
poissonJavaRDD(JavaSparkContext, double, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
poissonJavaRDD(JavaSparkContext, double, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
poissonJavaVectorRDD(JavaSparkContext, double, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
poissonJavaVectorRDD(JavaSparkContext, double, long, int, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
poissonJavaVectorRDD(JavaSparkContext, double, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
poissonRDD(SparkContext, double, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD comprised of i.i.d. samples from the Poisson distribution with the input mean.
PoissonSampler<T> - Class in org.apache.spark.util.random
:: DeveloperApi :: A sampler for sampling with replacement, based on values drawn from Poisson distribution.
PoissonSampler(double, boolean) - Constructor for class org.apache.spark.util.random.PoissonSampler
 
PoissonSampler(double) - Constructor for class org.apache.spark.util.random.PoissonSampler
 
poissonVectorRDD(SparkContext, double, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD[Vector] with vectors containing i.i.d. samples drawn from the Poisson distribution with the input mean.
PolynomialExpansion - Class in org.apache.spark.ml.feature
:: Experimental :: Perform feature expansion in a polynomial space.
PolynomialExpansion(String) - Constructor for class org.apache.spark.ml.feature.PolynomialExpansion
 
PolynomialExpansion() - Constructor for class org.apache.spark.ml.feature.PolynomialExpansion
 
poolToActiveStages() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
port() - Method in interface org.apache.spark.SparkExecutorInfo
 
port() - Method in class org.apache.spark.SparkExecutorInfoImpl
 
port() - Method in class org.apache.spark.storage.BlockManagerId
 
port() - Method in class org.apache.spark.streaming.kafka.Broker
Broker's port
port() - Method in class org.apache.spark.streaming.kafka.KafkaCluster.LeaderOffset
 
PortableDataStream - Class in org.apache.spark.input
A class that allows DataStreams to be serialized and moved around by not creating them until they need to be read.
PortableDataStream(CombineFileSplit, TaskAttemptContext, Integer) - Constructor for class org.apache.spark.input.PortableDataStream
 
portMaxRetries(SparkConf) - Static method in class org.apache.spark.util.Utils
Maximum number of retries when binding to a port before giving up.
positioned(Function0<Parsers.Parser<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
PostgresDialect - Class in org.apache.spark.sql.jdbc
 
PostgresDialect() - Constructor for class org.apache.spark.sql.jdbc.PostgresDialect
 
pow(Column, Column) - Static method in class org.apache.spark.sql.functions
Returns the value of the first argument raised to the power of the second argument.
pow(Column, String) - Static method in class org.apache.spark.sql.functions
Returns the value of the first argument raised to the power of the second argument.
pow(String, Column) - Static method in class org.apache.spark.sql.functions
Returns the value of the first argument raised to the power of the second argument.
pow(String, String) - Static method in class org.apache.spark.sql.functions
Returns the value of the first argument raised to the power of the second argument.
pow(Column, double) - Static method in class org.apache.spark.sql.functions
Returns the value of the first argument raised to the power of the second argument.
pow(String, double) - Static method in class org.apache.spark.sql.functions
Returns the value of the first argument raised to the power of the second argument.
pow(double, Column) - Static method in class org.apache.spark.sql.functions
Returns the value of the first argument raised to the power of the second argument.
pow(double, String) - Static method in class org.apache.spark.sql.functions
Returns the value of the first argument raised to the power of the second argument.
PowerIterationClustering - Class in org.apache.spark.mllib.clustering
Power Iteration Clustering (PIC), a scalable graph clustering algorithm developed by Lin and Cohen.
PowerIterationClustering() - Constructor for class org.apache.spark.mllib.clustering.PowerIterationClustering
Constructs a PIC instance with default parameters: {k: 2, maxIterations: 100, initMode: "random"}.
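A minimal sketch of running PIC on a similarity graph (assuming an RDD of (srcId, dstId, similarity) triples named similarities):

    import org.apache.spark.mllib.clustering.PowerIterationClustering

    val model = new PowerIterationClustering()
      .setK(3)
      .setMaxIterations(20)
      .run(similarities)               // similarities: RDD[(Long, Long, Double)]
    model.assignments.foreach(a => println(s"${a.id} -> ${a.cluster}"))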
PowerIterationClustering.Assignment - Class in org.apache.spark.mllib.clustering
Cluster assignment.
PowerIterationClustering.Assignment(long, int) - Constructor for class org.apache.spark.mllib.clustering.PowerIterationClustering.Assignment
 
PowerIterationClustering.Assignment$ - Class in org.apache.spark.mllib.clustering
 
PowerIterationClustering.Assignment$() - Constructor for class org.apache.spark.mllib.clustering.PowerIterationClustering.Assignment$
 
PowerIterationClusteringModel - Class in org.apache.spark.mllib.clustering
Model produced by PowerIterationClustering.
PowerIterationClusteringModel(int, RDD<PowerIterationClustering.Assignment>) - Constructor for class org.apache.spark.mllib.clustering.PowerIterationClusteringModel
 
PowerIterationClusteringModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.clustering
 
PowerIterationClusteringModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.clustering.PowerIterationClusteringModel.SaveLoadV1_0$
 
pr() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
Returns the precision-recall curve, which is a DataFrame containing two fields, recall and precision, with (0.0, 1.0) prepended to it.
pr() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Returns the precision-recall curve, which is an RDD of (recall, precision), NOT (precision, recall), with (0.0, 1.0) prepended to it.
Precision - Class in org.apache.spark.mllib.evaluation.binary
Precision.
Precision() - Constructor for class org.apache.spark.mllib.evaluation.binary.Precision
 
precision(double) - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns precision for a given label (category).
precision() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Deprecated.
Use accuracy. Since 2.0.0.
precision() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns document-based precision averaged by the number of documents.
precision(double) - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns precision for a given label (category).
precision() - Method in class org.apache.spark.sql.types.Decimal
 
precision() - Method in class org.apache.spark.sql.types.DecimalType
 
precisionAt(int) - Method in class org.apache.spark.mllib.evaluation.RankingMetrics
Compute the average precision of all the queries, truncated at ranking position k.
precisionByThreshold() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
Returns the (threshold, precision) curve as a DataFrame with two fields.
precisionByThreshold() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Returns the (threshold, precision) curve.
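A short sketch of computing these curves from (score, label) pairs (the RDD scoreAndLabels is an assumption):

    import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics

    val metrics = new BinaryClassificationMetrics(scoreAndLabels)
    val prCurve      = metrics.pr()                      // RDD of (recall, precision)
    val precByThresh = metrics.precisionByThreshold()    // RDD of (threshold, precision)
    println(s"Area under PR curve: ${metrics.areaUnderPR()}")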
predict(FeaturesType) - Method in class org.apache.spark.ml.classification.ClassificationModel
Predict label for the given features.
predict(Vector) - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
predict(Vector) - Method in class org.apache.spark.ml.classification.GBTClassificationModel
 
predict(Vector) - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
Predict label for the given feature vector.
predict(Vector) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
Predict label for the given features.
predict(FeaturesType) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
predict(FeaturesType) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
predict(FeaturesType) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
predict(FeaturesType) - Method in class org.apache.spark.ml.PredictionModel
Predict label for the given features.
predict(Vector) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
predict(Vector) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
predict(Vector) - Method in class org.apache.spark.ml.regression.GBTRegressionModel
 
predict(Vector) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
predict(Vector) - Method in class org.apache.spark.ml.regression.LinearRegressionModel
 
predict(Vector) - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
predict(RDD<Vector>) - Method in interface org.apache.spark.mllib.classification.ClassificationModel
Predict values for the given data set using the model trained.
predict(Vector) - Method in interface org.apache.spark.mllib.classification.ClassificationModel
Predict values for a single data point using the model trained.
predict(JavaRDD<Vector>) - Method in interface org.apache.spark.mllib.classification.ClassificationModel
Predict values for examples stored in a JavaRDD.
predict(RDD<Vector>) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
predict(Vector) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
predict(JavaRDD<Vector>) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
predict(RDD<Vector>) - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
 
predict(Vector) - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
 
predict(RDD<Vector>) - Static method in class org.apache.spark.mllib.classification.SVMModel
 
predict(Vector) - Static method in class org.apache.spark.mllib.classification.SVMModel
 
predict(JavaRDD<Vector>) - Static method in class org.apache.spark.mllib.classification.SVMModel
 
predict(Vector) - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
Predicts the index of the cluster that the input point belongs to.
predict(RDD<Vector>) - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
Predicts the indices of the clusters that the input points belong to.
predict(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
Java-friendly version of predict().
predict(RDD<Vector>) - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
Maps given points to their cluster indices.
predict(Vector) - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
Maps given point to its cluster index.
predict(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
Java-friendly version of predict().
predict(Vector) - Method in class org.apache.spark.mllib.clustering.KMeansModel
Returns the cluster index that a given point belongs to.
predict(RDD<Vector>) - Method in class org.apache.spark.mllib.clustering.KMeansModel
Maps given points to their cluster indices.
predict(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.clustering.KMeansModel
Maps given points to their cluster indices.
predict(int, int) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
Predict the rating of one user for one product.
predict(RDD<Tuple2<Object, Object>>) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
Predict the rating of many users for many products.
predict(JavaPairRDD<Integer, Integer>) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
Java-friendly version of MatrixFactorizationModel.predict.
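A minimal sketch of both prediction forms on MatrixFactorizationModel, assuming a model trained elsewhere (e.g. by ALS.train), an existing SparkContext `sc`, and made-up user/product ids:

    // `model` and `sc` are assumed to exist; the ids are illustrative only
    val one = model.predict(1, 42)                                 // rating of user 1 for product 42
    val userProducts = sc.parallelize(Seq((1, 42), (1, 7), (2, 42)))
    model.predict(userProducts)                                    // RDD[Rating]
      .collect()
      .foreach(r => println(s"user=${r.user} product=${r.product} rating=${r.rating}"))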
predict(RDD<Vector>) - Method in class org.apache.spark.mllib.regression.GeneralizedLinearModel
Predict values for the given data set using the model trained.
predict(Vector) - Method in class org.apache.spark.mllib.regression.GeneralizedLinearModel
Predict values for a single data point using the model trained.
predict(RDD<Object>) - Method in class org.apache.spark.mllib.regression.IsotonicRegressionModel
Predict labels for provided features.
predict(JavaDoubleRDD) - Method in class org.apache.spark.mllib.regression.IsotonicRegressionModel
Predict labels for provided features.
predict(double) - Method in class org.apache.spark.mllib.regression.IsotonicRegressionModel
Predict a single label.
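A small sketch of predict on the mllib IsotonicRegressionModel, assuming a model fit elsewhere and an existing SparkContext `sc`; predictions are interpolated between the model's boundaries:

    // `model` and `sc` are assumed to exist
    val single = model.predict(0.5)                                // one label
    val many   = model.predict(sc.parallelize(Seq(0.1, 0.5, 0.9))) // RDD[Double] in, RDD[Double] out
    many.collect().foreach(println)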
predict(RDD<Vector>) - Static method in class org.apache.spark.mllib.regression.LassoModel
 
predict(Vector) - Static method in class org.apache.spark.mllib.regression.LassoModel
 
predict(JavaRDD<Vector>) - Static method in class org.apache.spark.mllib.regression.LassoModel
 
predict(RDD<Vector>) - Static method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
predict(Vector) - Static method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
predict(JavaRDD<Vector>) - Static method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
predict(RDD<Vector>) - Method in interface org.apache.spark.mllib.regression.RegressionModel
Predict values for the given data set using the model trained.
predict(Vector) - Method in interface org.apache.spark.mllib.regression.RegressionModel
Predict values for a single data point using the model trained.
predict(JavaRDD<Vector>) - Method in interface org.apache.spark.mllib.regression.RegressionModel
Predict values for examples stored in a JavaRDD.
predict(RDD<Vector>) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
predict(Vector) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
predict(JavaRDD<Vector>) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
predict(Vector) - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
Predict values for a single data point using the model trained.
predict(RDD<Vector>) - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
Predict values for the given data set using the model trained.
predict(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
Predict values for the given data set using the model trained.
predict() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
predict() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.PredictData
 
predict(Vector) - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
predict(RDD<Vector>) - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
predict(JavaRDD<Vector>) - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
predict() - Method in class org.apache.spark.mllib.tree.model.Node
 
predict(Vector) - Method in class org.apache.spark.mllib.tree.model.Node
Predict value if node is not a leaf.
Predict - Class in org.apache.spark.mllib.tree.model
Predicted value for a node. param: predict - predicted value; param: prob - probability of the label (classification only)
Predict(double, double) - Constructor for class org.apache.spark.mllib.tree.model.Predict
 
predict() - Method in class org.apache.spark.mllib.tree.model.Predict
 
predict(Vector) - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
predict(RDD<Vector>) - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
predict(JavaRDD<Vector>) - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
prediction() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData
 
prediction() - Method in class org.apache.spark.ml.tree.InternalNode
 
prediction() - Method in class org.apache.spark.ml.tree.LeafNode
 
prediction() - Method in class org.apache.spark.ml.tree.Node
Prediction a leaf node makes, or the prediction an internal node would make if it were a leaf node.
predictionCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
predictionCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
predictionCol() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
predictionCol() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
predictionCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
predictionCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
predictionCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
predictionCol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
predictionCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
predictionCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
predictionCol() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
predictionCol() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
predictionCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
predictionCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
predictionCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
predictionCol() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
predictionCol() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
predictionCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
predictionCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
predictionCol() - Method in class org.apache.spark.ml.clustering.GaussianMixtureSummary
 
predictionCol() - Static method in class org.apache.spark.ml.clustering.KMeans
 
predictionCol() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
predictionCol() - Method in class org.apache.spark.ml.clustering.KMeansSummary
 
predictionCol() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
predictionCol() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
predictionCol() - Static method in class org.apache.spark.ml.recommendation.ALS
 
predictionCol() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
predictionCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
predictionCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
predictionCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
predictionCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
predictionCol() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
predictionCol() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
predictionCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
predictionCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
predictionCol() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
Field in "predictions" which gives the predicted value of each instance.
predictionCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
predictionCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
predictionCol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
predictionCol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
predictionCol() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
 
predictionCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
predictionCol() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
PredictionModel<FeaturesType,M extends PredictionModel<FeaturesType,M>> - Class in org.apache.spark.ml
:: DeveloperApi :: Abstraction for a model for prediction tasks (regression and classification).
PredictionModel() - Constructor for class org.apache.spark.ml.PredictionModel
 
predictions() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
 
predictions() - Method in interface org.apache.spark.ml.classification.LogisticRegressionSummary
Dataframe output by the model's `transform` method.
predictions() - Method in class org.apache.spark.ml.clustering.GaussianMixtureSummary
 
predictions() - Method in class org.apache.spark.ml.clustering.KMeansSummary
 
predictions() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
Predictions output by the model's `transform` method.
predictions() - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
Predictions associated with the boundaries at the same index, monotone because of isotonic regression.
predictions() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
 
predictions() - Method in class org.apache.spark.mllib.regression.IsotonicRegressionModel
 
predictOn(DStream<Vector>) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Use the clustering model to make predictions on batches of data from a DStream.
predictOn(JavaDStream<Vector>) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Java-friendly version of predictOn.
predictOn(DStream<Vector>) - Method in class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
Use the model to make predictions on batches of data from a DStream
predictOn(JavaDStream<Vector>) - Method in class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
Java-friendly version of predictOn.
predictOnValues(DStream<Tuple2<K, Vector>>, ClassTag<K>) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Use the model to make predictions on the values of a DStream and carry over its keys.
predictOnValues(JavaPairDStream<K, Vector>) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Java-friendly version of predictOnValues.
predictOnValues(DStream<Tuple2<K, Vector>>, ClassTag<K>) - Method in class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
Use the model to make predictions on the values of a DStream and carry over its keys.
predictOnValues(JavaPairDStream<K, Vector>) - Method in class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
Java-friendly version of predictOnValues.
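As a hedged illustration of predictOnValues with StreamingKMeans, assuming DStreams `trainingData: DStream[Vector]` and `keyedTest: DStream[(String, Vector)]` built on an existing StreamingContext:

    import org.apache.spark.mllib.clustering.StreamingKMeans

    // `trainingData` and `keyedTest` are assumed DStreams from an existing StreamingContext
    val model = new StreamingKMeans()
      .setK(3)
      .setDecayFactor(1.0)
      .setRandomCenters(2, 0.0)                // 2-dimensional random centers, zero initial weight

    model.trainOn(trainingData)
    model.predictOnValues(keyedTest).print()   // keys are carried over; values become cluster indices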
Predictor<FeaturesType,Learner extends Predictor<FeaturesType,Learner,M>,M extends PredictionModel<FeaturesType,M>> - Class in org.apache.spark.ml
:: DeveloperApi :: Abstraction for prediction problems (regression and classification).
Predictor() - Constructor for class org.apache.spark.ml.Predictor
 
predictPoint(Vector, Vector, double) - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
predictPoint(Vector, Vector, double) - Method in class org.apache.spark.mllib.classification.SVMModel
 
predictPoint(Vector, Vector, double) - Method in class org.apache.spark.mllib.regression.GeneralizedLinearModel
Predict the result given a data point and the weights learned.
predictPoint(Vector, Vector, double) - Method in class org.apache.spark.mllib.regression.LassoModel
 
predictPoint(Vector, Vector, double) - Method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
predictPoint(Vector, Vector, double) - Method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
predictProbabilities(RDD<Vector>) - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
Predict posterior class probabilities for the given data set using the model trained.
predictProbabilities(Vector) - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
Predict posterior class probabilities for a single data point using the model trained.
predictProbability(FeaturesType) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
predictProbability(FeaturesType) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
predictProbability(FeaturesType) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
predictProbability(FeaturesType) - Method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
Predict the probability of each class given the features.
predictProbability(FeaturesType) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
predictQuantiles(Vector) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
predictRaw(FeaturesType) - Method in class org.apache.spark.ml.classification.ClassificationModel
Raw prediction for each possible label.
predictRaw(Vector) - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
predictRaw(Vector) - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
predictRaw(Vector) - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
predictRaw(FeaturesType) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
predictRaw(Vector) - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
predictSoft(RDD<Vector>) - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
Given the input vectors, return the membership value of each vector to all mixture components.
predictSoft(Vector) - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
Given the input vector, return the membership values to all mixture components.
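A brief sketch of predictSoft, assuming a GaussianMixtureModel `gmm` trained elsewhere (e.g. new GaussianMixture().setK(2).run(data)):

    import org.apache.spark.mllib.linalg.Vectors

    // `gmm` is assumed to be a trained GaussianMixtureModel
    val memberships: Array[Double] = gmm.predictSoft(Vectors.dense(1.0, 2.0))
    println(memberships.mkString(", "))        // one membership weight per mixture component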
predictVariance(Vector) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
We need to update this function if we ever add other impurity measures.
preferredLocation() - Method in class org.apache.spark.streaming.receiver.Receiver
Override this to specify a preferred location (hostname).
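A hedged sketch of overriding preferredLocation in a custom receiver; the class name and host name below are hypothetical:

    import org.apache.spark.storage.StorageLevel
    import org.apache.spark.streaming.receiver.Receiver

    // Hypothetical receiver that asks to be scheduled on a specific (made-up) host.
    class PinnedReceiver extends Receiver[String](StorageLevel.MEMORY_ONLY) {
      override def preferredLocation: Option[String] = Some("worker-1.example.com")
      def onStart(): Unit = { /* start a thread that calls store(...) */ }
      def onStop(): Unit = { }
    }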
preferredLocations(Partition) - Static method in class org.apache.spark.api.r.RRDD
 
preferredLocations(Partition) - Static method in class org.apache.spark.graphx.EdgeRDD
 
preferredLocations(Partition) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
preferredLocations(Partition) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
preferredLocations(Partition) - Static method in class org.apache.spark.graphx.VertexRDD
 
preferredLocations(Partition) - Static method in class org.apache.spark.rdd.HadoopRDD
 
preferredLocations(Partition) - Static method in class org.apache.spark.rdd.JdbcRDD
 
preferredLocations(Partition) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
preferredLocations(Partition) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
preferredLocations(Partition) - Method in class org.apache.spark.rdd.RDD
Get the preferred locations of a partition, taking into account whether the RDD is checkpointed.
prefixesToRewrite() - Method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
prefixLength(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
PrefixSpan - Class in org.apache.spark.mllib.fpm
:: Experimental ::
PrefixSpan() - Constructor for class org.apache.spark.mllib.fpm.PrefixSpan
Constructs a default instance with default parameters {minSupport: 0.1, maxPatternLength: 10, maxLocalProjDBSize: 32000000L}.
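A minimal end-to-end sketch of PrefixSpan on toy sequences, assuming an existing SparkContext `sc`:

    import org.apache.spark.mllib.fpm.PrefixSpan

    // each sequence is an Array of itemsets; toy data
    val sequences = sc.parallelize(Seq(
      Array(Array(1, 2), Array(3)),
      Array(Array(1), Array(3, 2), Array(1, 2)),
      Array(Array(1, 2), Array(5)),
      Array(Array(6))
    ), 2).cache()

    val model = new PrefixSpan()
      .setMinSupport(0.5)
      .setMaxPatternLength(5)
      .run(sequences)

    model.freqSequences.collect().foreach { fs =>
      println(fs.sequence.map(_.mkString("[", ",", "]")).mkString(",") + ": " + fs.freq)
    }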
PrefixSpan.FreqSequence<Item> - Class in org.apache.spark.mllib.fpm
Represents a frequent sequence.
PrefixSpan.FreqSequence(Object[], long) - Constructor for class org.apache.spark.mllib.fpm.PrefixSpan.FreqSequence
 
PrefixSpan.Postfix$ - Class in org.apache.spark.mllib.fpm
 
PrefixSpan.Postfix$() - Constructor for class org.apache.spark.mllib.fpm.PrefixSpan.Postfix$
 
PrefixSpan.Prefix$ - Class in org.apache.spark.mllib.fpm
 
PrefixSpan.Prefix$() - Constructor for class org.apache.spark.mllib.fpm.PrefixSpan.Prefix$
 
PrefixSpanModel<Item> - Class in org.apache.spark.mllib.fpm
Model fitted by PrefixSpan. param: freqSequences - frequent sequences
PrefixSpanModel(RDD<PrefixSpan.FreqSequence<Item>>) - Constructor for class org.apache.spark.mllib.fpm.PrefixSpanModel
 
PrefixSpanModel.SaveLoadV1_0$ - Class in org.apache.spark.mllib.fpm
 
PrefixSpanModel.SaveLoadV1_0$() - Constructor for class org.apache.spark.mllib.fpm.PrefixSpanModel.SaveLoadV1_0$
 
prefLoc() - Method in class org.apache.spark.rdd.PartitionGroup
 
pregel(A, int, EdgeDirection, Function3<Object, VD, A, VD>, Function1<EdgeTriplet<VD, ED>, Iterator<Tuple2<Object, A>>>, Function2<A, A, A>, ClassTag<A>) - Method in class org.apache.spark.graphx.GraphOps
Execute a Pregel-like iterative vertex-parallel abstraction.
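To make the shape of the abstraction concrete, a hedged single-source-shortest-paths sketch, assuming a `Graph[Double, Double]` named `graph` whose vertex attribute is 0.0 at the source and Double.PositiveInfinity elsewhere:

    import org.apache.spark.graphx._

    // `graph: Graph[Double, Double]` is assumed; edge attributes are distances
    val sssp = graph.pregel(Double.PositiveInfinity)(
      (id, dist, newDist) => math.min(dist, newDist),              // vertex program
      triplet =>                                                   // send messages along shorter paths
        if (triplet.srcAttr + triplet.attr < triplet.dstAttr)
          Iterator((triplet.dstId, triplet.srcAttr + triplet.attr))
        else Iterator.empty,
      (a, b) => math.min(a, b)                                     // merge messages
    )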
Pregel - Class in org.apache.spark.graphx
Implements a Pregel-like bulk-synchronous message-passing API.
Pregel() - Constructor for class org.apache.spark.graphx.Pregel
 
prepare() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
prepareRead(SparkSession, Map<String, String>, Seq<FileStatus>) - Method in class org.apache.spark.ml.source.libsvm.DefaultSource
 
prepareSubqueries() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
prepareWritable(Writable, Seq<Tuple2<String, String>>) - Static method in class org.apache.spark.sql.hive.HiveShim
 
prepareWrite(SparkSession, Job, Map<String, String>, StructType) - Method in class org.apache.spark.ml.source.libsvm.DefaultSource
 
prependBaseUri(String, String) - Static method in class org.apache.spark.ui.UIUtils
 
prettyJson() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
prettyJson() - Static method in class org.apache.spark.sql.types.ArrayType
 
prettyJson() - Static method in class org.apache.spark.sql.types.BinaryType
 
prettyJson() - Static method in class org.apache.spark.sql.types.BooleanType
 
prettyJson() - Static method in class org.apache.spark.sql.types.ByteType
 
prettyJson() - Static method in class org.apache.spark.sql.types.CalendarIntervalType
 
prettyJson() - Method in class org.apache.spark.sql.types.DataType
The pretty (i.e. indented) JSON representation of this data type.
prettyJson() - Static method in class org.apache.spark.sql.types.DateType
 
prettyJson() - Static method in class org.apache.spark.sql.types.DecimalType
 
prettyJson() - Static method in class org.apache.spark.sql.types.DoubleType
 
prettyJson() - Static method in class org.apache.spark.sql.types.FloatType
 
prettyJson() - Static method in class org.apache.spark.sql.types.IntegerType
 
prettyJson() - Static method in class org.apache.spark.sql.types.LongType
 
prettyJson() - Static method in class org.apache.spark.sql.types.MapType
 
prettyJson() - Static method in class org.apache.spark.sql.types.NullType
 
prettyJson() - Static method in class org.apache.spark.sql.types.NumericType
 
prettyJson() - Static method in class org.apache.spark.sql.types.ShortType
 
prettyJson() - Static method in class org.apache.spark.sql.types.StringType
 
prettyJson() - Static method in class org.apache.spark.sql.types.StructType
 
prettyJson() - Static method in class org.apache.spark.sql.types.TimestampType
 
prettyPrint() - Method in class org.apache.spark.streaming.Duration
 
prev() - Method in class org.apache.spark.rdd.ShuffledRDD
 
primitiveTypes() - Static method in class org.apache.spark.sql.hive.HiveUtils
 
print() - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
print(int) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
print() - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Print the first ten elements of each RDD generated in this DStream.
print(int) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Print the first num elements of each RDD generated in this DStream.
print() - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
print(int) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
print() - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
print(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
print() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
print(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
print() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
print(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
print() - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
print(int) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
print() - Method in class org.apache.spark.streaming.dstream.DStream
Print the first ten elements of each RDD generated in this DStream.
print(int) - Method in class org.apache.spark.streaming.dstream.DStream
Print the first num elements of each RDD generated in this DStream.
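A brief sketch of both overloads, assuming an existing StreamingContext `ssc` and a hypothetical socket source:

    // `ssc` is assumed; the host and port are made up
    val lines = ssc.socketTextStream("localhost", 9999)
    lines.print()     // first ten elements of each batch
    lines.print(3)    // first three elements of each batch
    ssc.start()
    ssc.awaitTermination()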
printSchema() - Method in class org.apache.spark.sql.Dataset
Prints the schema to the console in a nice tree format.
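For example, a small sketch assuming an existing SparkSession `spark` and a hypothetical input path:

    // `spark` is assumed; the path is illustrative only
    val df = spark.read.json("path/to/people.json")
    df.printSchema()  // prints the schema as an indented tree rooted at "root"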
printSchema() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
printStackTrace() - Static method in exception org.apache.spark.sql.AnalysisException
 
printStackTrace(PrintStream) - Static method in exception org.apache.spark.sql.AnalysisException
 
printStackTrace(PrintWriter) - Static method in exception org.apache.spark.sql.AnalysisException
 
printStackTrace() - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
printStackTrace(PrintStream) - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
printStackTrace(PrintWriter) - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
printStats() - Method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
printTreeString() - Method in class org.apache.spark.sql.types.StructType
 
prob() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.PredictData
 
prob() - Method in class org.apache.spark.mllib.tree.model.Predict
 
ProbabilisticClassificationModel<FeaturesType,M extends ProbabilisticClassificationModel<FeaturesType,M>> - Class in org.apache.spark.ml.classification
:: DeveloperApi ::
ProbabilisticClassificationModel() - Constructor for class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
ProbabilisticClassifier<FeaturesType,E extends ProbabilisticClassifier<FeaturesType,E,M>,M extends ProbabilisticClassificationModel<FeaturesType,M>> - Class in org.apache.spark.ml.classification
:: DeveloperApi ::
ProbabilisticClassifier() - Constructor for class org.apache.spark.ml.classification.ProbabilisticClassifier
 
probabilities() - Static method in class org.apache.spark.scheduler.StatsReportListener
 
probability() - Method in class org.apache.spark.ml.clustering.GaussianMixtureSummary
Probability of each cluster.
probability2prediction(Vector) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
probability2prediction(Vector) - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
probability2prediction(Vector) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
probability2prediction(Vector) - Method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
Given a vector of class conditional probabilities, select the predicted label.
probability2prediction(Vector) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
probabilityCol() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
 
probabilityCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
probabilityCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
probabilityCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
probabilityCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
probabilityCol() - Method in interface org.apache.spark.ml.classification.LogisticRegressionSummary
Field in "predictions" which gives the probability of each class as a vector.
probabilityCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
probabilityCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
probabilityCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
probabilityCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
probabilityCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
probabilityCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
probabilityCol() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
probabilityCol() - Method in class org.apache.spark.ml.clustering.GaussianMixtureSummary
 
PROCESS_LOCAL() - Static method in class org.apache.spark.scheduler.TaskLocality
 
processAllAvailable() - Method in interface org.apache.spark.sql.ContinuousQuery
Blocks until all available data in the source has been processed and committed to the sink.
processingDelay() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
Time taken for all the jobs of this batch to finish processing from the time they started processing.
processingEndTime() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
 
processingStartTime() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
 
ProcessingTime - Class in org.apache.spark.sql
:: Experimental :: A trigger that runs a query periodically based on the processing time.
ProcessingTime(long) - Constructor for class org.apache.spark.sql.ProcessingTime
 
processStreamByLine(String, InputStream, Function1<String, BoxedUnit>) - Static method in class org.apache.spark.util.Utils
Return and start a daemon thread that processes the content of the input stream line by line.
producedAttributes() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
product() - Method in class org.apache.spark.mllib.recommendation.Rating
 
product(TypeTags.TypeTag<T>) - Static method in class org.apache.spark.sql.Encoders
An encoder for Scala's product type (tuples, case classes, etc.).
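A short sketch of an explicit product encoder for a case class, assuming an existing SparkSession `spark`; the Point class is illustrative only:

    import org.apache.spark.sql.{Dataset, Encoder, Encoders}

    case class Point(x: Double, y: Double)   // hypothetical case class

    // `spark` is assumed to be an existing SparkSession
    implicit val pointEncoder: Encoder[Point] = Encoders.product[Point]
    val ds: Dataset[Point] = spark.createDataset(Seq(Point(1.0, 2.0), Point(3.0, 4.0)))
    ds.show()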
product(Numeric<B>) - Static method in class org.apache.spark.sql.types.StructType
 
productArity() - Static method in class org.apache.spark.Aggregator
 
productArity() - Static method in class org.apache.spark.CleanAccum
 
productArity() - Static method in class org.apache.spark.CleanBroadcast
 
productArity() - Static method in class org.apache.spark.CleanCheckpoint
 
productArity() - Static method in class org.apache.spark.CleanRDD
 
productArity() - Static method in class org.apache.spark.CleanShuffle
 
productArity() - Static method in class org.apache.spark.ExceptionFailure
 
productArity() - Static method in class org.apache.spark.ExecutorLostFailure
 
productArity() - Static method in class org.apache.spark.ExecutorRegistered
 
productArity() - Static method in class org.apache.spark.ExecutorRemoved
 
productArity() - Static method in class org.apache.spark.ExpireDeadHosts
 
productArity() - Static method in class org.apache.spark.FetchFailed
 
productArity() - Static method in class org.apache.spark.graphx.Edge
 
productArity() - Static method in class org.apache.spark.ml.feature.Dot
 
productArity() - Static method in class org.apache.spark.ml.feature.LabeledPoint
 
productArity() - Static method in class org.apache.spark.ml.param.ParamPair
 
productArity() - Static method in class org.apache.spark.mllib.feature.VocabWord
 
productArity() - Static method in class org.apache.spark.mllib.linalg.distributed.IndexedRow
 
productArity() - Static method in class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
productArity() - Static method in class org.apache.spark.mllib.linalg.QRDecomposition
 
productArity() - Static method in class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
productArity() - Static method in class org.apache.spark.mllib.recommendation.Rating
 
productArity() - Static method in class org.apache.spark.mllib.regression.LabeledPoint
 
productArity() - Static method in class org.apache.spark.mllib.stat.test.BinarySample
 
productArity() - Static method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
productArity() - Static method in class org.apache.spark.mllib.tree.model.Split
 
productArity() - Static method in class org.apache.spark.Resubmitted
 
productArity() - Static method in class org.apache.spark.rpc.netty.OnStart
 
productArity() - Static method in class org.apache.spark.rpc.netty.OnStop
 
productArity() - Static method in class org.apache.spark.scheduler.AccumulableInfo
 
productArity() - Static method in class org.apache.spark.scheduler.AllJobsCancelled
 
productArity() - Static method in class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
productArity() - Static method in class org.apache.spark.scheduler.JobSucceeded
 
productArity() - Static method in class org.apache.spark.scheduler.local.KillTask
 
productArity() - Static method in class org.apache.spark.scheduler.local.ReviveOffers
 
productArity() - Static method in class org.apache.spark.scheduler.local.StatusUpdate
 
productArity() - Static method in class org.apache.spark.scheduler.local.StopExecutor
 
productArity() - Static method in class org.apache.spark.scheduler.ResubmitFailedStages
 
productArity() - Static method in class org.apache.spark.scheduler.RuntimePercentage
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerApplicationEnd
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerBlockUpdated
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerJobStart
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerStageCompleted
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerTaskGettingResult
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
productArity() - Static method in class org.apache.spark.scheduler.SparkListenerUnpersistRDD
 
productArity() - Static method in class org.apache.spark.scheduler.StopCoordinator
 
productArity() - Static method in class org.apache.spark.sql.DatasetHolder
 
productArity() - Static method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
productArity() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
productArity() - Static method in class org.apache.spark.sql.internal.HiveSerDe
 
productArity() - Static method in class org.apache.spark.sql.jdbc.JdbcType
 
productArity() - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
productArity() - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
productArity() - Static method in class org.apache.spark.sql.ProcessingTime
 
productArity() - Static method in class org.apache.spark.sql.sources.And
 
productArity() - Static method in class org.apache.spark.sql.sources.EqualNullSafe
 
productArity() - Static method in class org.apache.spark.sql.sources.EqualTo
 
productArity() - Static method in class org.apache.spark.sql.sources.GreaterThan
 
productArity() - Static method in class org.apache.spark.sql.sources.GreaterThanOrEqual
 
productArity() - Static method in class org.apache.spark.sql.sources.In
 
productArity() - Static method in class org.apache.spark.sql.sources.IsNotNull
 
productArity() - Static method in class org.apache.spark.sql.sources.IsNull
 
productArity() - Static method in class org.apache.spark.sql.sources.LessThan
 
productArity() - Static method in class org.apache.spark.sql.sources.LessThanOrEqual
 
productArity() - Static method in class org.apache.spark.sql.sources.Not
 
productArity() - Static method in class org.apache.spark.sql.sources.Or
 
productArity() - Static method in class org.apache.spark.sql.sources.StringContains
 
productArity() - Static method in class org.apache.spark.sql.sources.StringEndsWith
 
productArity() - Static method in class org.apache.spark.sql.sources.StringStartsWith
 
productArity() - Static method in class org.apache.spark.sql.types.ArrayType
 
productArity() - Static method in class org.apache.spark.sql.types.DecimalType
 
productArity() - Static method in class org.apache.spark.sql.types.MapType
 
productArity() - Static method in class org.apache.spark.sql.types.StructField
 
productArity() - Static method in class org.apache.spark.sql.types.StructType
 
productArity() - Static method in class org.apache.spark.StopMapOutputTracker
 
productArity() - Static method in class org.apache.spark.storage.BlockStatus
 
productArity() - Static method in class org.apache.spark.storage.BlockUpdatedInfo
 
productArity() - Static method in class org.apache.spark.storage.BroadcastBlockId
 
productArity() - Static method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
productArity() - Static method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
productArity() - Static method in class org.apache.spark.storage.RDDBlockId
 
productArity() - Static method in class org.apache.spark.storage.ShuffleBlockId
 
productArity() - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
productArity() - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
productArity() - Static method in class org.apache.spark.storage.StreamBlockId
 
productArity() - Static method in class org.apache.spark.storage.TaskResultBlockId
 
productArity() - Static method in class org.apache.spark.streaming.Duration
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.AllReceiverIds
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.BatchInfo
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.GetAllReceiverInfo
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StopAllReceivers
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchCompleted
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchStarted
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationCompleted
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationStarted
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverError
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStarted
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStopped
 
productArity() - Static method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
productArity() - Static method in class org.apache.spark.streaming.Time
 
productArity() - Static method in class org.apache.spark.Success
 
productArity() - Static method in class org.apache.spark.TaskCommitDenied
 
productArity() - Static method in class org.apache.spark.TaskKilled
 
productArity() - Static method in class org.apache.spark.TaskResultLost
 
productArity() - Static method in class org.apache.spark.TaskSchedulerIsSet
 
productArity() - Static method in class org.apache.spark.UnknownReason
 
productArity() - Static method in class org.apache.spark.util.MethodIdentifier
 
productArity() - Static method in class org.apache.spark.util.MutablePair
 
productElement(int) - Static method in class org.apache.spark.Aggregator
 
productElement(int) - Static method in class org.apache.spark.CleanAccum
 
productElement(int) - Static method in class org.apache.spark.CleanBroadcast
 
productElement(int) - Static method in class org.apache.spark.CleanCheckpoint
 
productElement(int) - Static method in class org.apache.spark.CleanRDD
 
productElement(int) - Static method in class org.apache.spark.CleanShuffle
 
productElement(int) - Static method in class org.apache.spark.ExceptionFailure
 
productElement(int) - Static method in class org.apache.spark.ExecutorLostFailure
 
productElement(int) - Static method in class org.apache.spark.ExecutorRegistered
 
productElement(int) - Static method in class org.apache.spark.ExecutorRemoved
 
productElement(int) - Static method in class org.apache.spark.ExpireDeadHosts
 
productElement(int) - Static method in class org.apache.spark.FetchFailed
 
productElement(int) - Static method in class org.apache.spark.graphx.Edge
 
productElement(int) - Static method in class org.apache.spark.ml.feature.Dot
 
productElement(int) - Static method in class org.apache.spark.ml.feature.LabeledPoint
 
productElement(int) - Static method in class org.apache.spark.ml.param.ParamPair
 
productElement(int) - Static method in class org.apache.spark.mllib.feature.VocabWord
 
productElement(int) - Static method in class org.apache.spark.mllib.linalg.distributed.IndexedRow
 
productElement(int) - Static method in class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
productElement(int) - Static method in class org.apache.spark.mllib.linalg.QRDecomposition
 
productElement(int) - Static method in class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
productElement(int) - Static method in class org.apache.spark.mllib.recommendation.Rating
 
productElement(int) - Static method in class org.apache.spark.mllib.regression.LabeledPoint
 
productElement(int) - Static method in class org.apache.spark.mllib.stat.test.BinarySample
 
productElement(int) - Static method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
productElement(int) - Static method in class org.apache.spark.mllib.tree.model.Split
 
productElement(int) - Static method in class org.apache.spark.Resubmitted
 
productElement(int) - Static method in class org.apache.spark.rpc.netty.OnStart
 
productElement(int) - Static method in class org.apache.spark.rpc.netty.OnStop
 
productElement(int) - Static method in class org.apache.spark.scheduler.AccumulableInfo
 
productElement(int) - Static method in class org.apache.spark.scheduler.AllJobsCancelled
 
productElement(int) - Static method in class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
productElement(int) - Static method in class org.apache.spark.scheduler.JobSucceeded
 
productElement(int) - Static method in class org.apache.spark.scheduler.local.KillTask
 
productElement(int) - Static method in class org.apache.spark.scheduler.local.ReviveOffers
 
productElement(int) - Static method in class org.apache.spark.scheduler.local.StatusUpdate
 
productElement(int) - Static method in class org.apache.spark.scheduler.local.StopExecutor
 
productElement(int) - Static method in class org.apache.spark.scheduler.ResubmitFailedStages
 
productElement(int) - Static method in class org.apache.spark.scheduler.RuntimePercentage
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerApplicationEnd
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerBlockUpdated
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerJobStart
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerStageCompleted
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerTaskGettingResult
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
productElement(int) - Static method in class org.apache.spark.scheduler.SparkListenerUnpersistRDD
 
productElement(int) - Static method in class org.apache.spark.scheduler.StopCoordinator
 
productElement(int) - Static method in class org.apache.spark.sql.DatasetHolder
 
productElement(int) - Static method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
productElement(int) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
productElement(int) - Static method in class org.apache.spark.sql.internal.HiveSerDe
 
productElement(int) - Static method in class org.apache.spark.sql.jdbc.JdbcType
 
productElement(int) - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
productElement(int) - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
productElement(int) - Static method in class org.apache.spark.sql.ProcessingTime
 
productElement(int) - Static method in class org.apache.spark.sql.sources.And
 
productElement(int) - Static method in class org.apache.spark.sql.sources.EqualNullSafe
 
productElement(int) - Static method in class org.apache.spark.sql.sources.EqualTo
 
productElement(int) - Static method in class org.apache.spark.sql.sources.GreaterThan
 
productElement(int) - Static method in class org.apache.spark.sql.sources.GreaterThanOrEqual
 
productElement(int) - Static method in class org.apache.spark.sql.sources.In
 
productElement(int) - Static method in class org.apache.spark.sql.sources.IsNotNull
 
productElement(int) - Static method in class org.apache.spark.sql.sources.IsNull
 
productElement(int) - Static method in class org.apache.spark.sql.sources.LessThan
 
productElement(int) - Static method in class org.apache.spark.sql.sources.LessThanOrEqual
 
productElement(int) - Static method in class org.apache.spark.sql.sources.Not
 
productElement(int) - Static method in class org.apache.spark.sql.sources.Or
 
productElement(int) - Static method in class org.apache.spark.sql.sources.StringContains
 
productElement(int) - Static method in class org.apache.spark.sql.sources.StringEndsWith
 
productElement(int) - Static method in class org.apache.spark.sql.sources.StringStartsWith
 
productElement(int) - Static method in class org.apache.spark.sql.types.ArrayType
 
productElement(int) - Static method in class org.apache.spark.sql.types.DecimalType
 
productElement(int) - Static method in class org.apache.spark.sql.types.MapType
 
productElement(int) - Static method in class org.apache.spark.sql.types.StructField
 
productElement(int) - Static method in class org.apache.spark.sql.types.StructType
 
productElement(int) - Static method in class org.apache.spark.StopMapOutputTracker
 
productElement(int) - Static method in class org.apache.spark.storage.BlockStatus
 
productElement(int) - Static method in class org.apache.spark.storage.BlockUpdatedInfo
 
productElement(int) - Static method in class org.apache.spark.storage.BroadcastBlockId
 
productElement(int) - Static method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
productElement(int) - Static method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
productElement(int) - Static method in class org.apache.spark.storage.RDDBlockId
 
productElement(int) - Static method in class org.apache.spark.storage.ShuffleBlockId
 
productElement(int) - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
productElement(int) - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
productElement(int) - Static method in class org.apache.spark.storage.StreamBlockId
 
productElement(int) - Static method in class org.apache.spark.storage.TaskResultBlockId
 
productElement(int) - Static method in class org.apache.spark.streaming.Duration
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.AllReceiverIds
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.BatchInfo
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.GetAllReceiverInfo
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StopAllReceivers
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchCompleted
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchStarted
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationCompleted
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationStarted
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverError
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStarted
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStopped
 
productElement(int) - Static method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
productElement(int) - Static method in class org.apache.spark.streaming.Time
 
productElement(int) - Static method in class org.apache.spark.Success
 
productElement(int) - Static method in class org.apache.spark.TaskCommitDenied
 
productElement(int) - Static method in class org.apache.spark.TaskKilled
 
productElement(int) - Static method in class org.apache.spark.TaskResultLost
 
productElement(int) - Static method in class org.apache.spark.TaskSchedulerIsSet
 
productElement(int) - Static method in class org.apache.spark.UnknownReason
 
productElement(int) - Static method in class org.apache.spark.util.MethodIdentifier
 
productElement(int) - Static method in class org.apache.spark.util.MutablePair
 
productFeatures() - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
productIterator() - Static method in class org.apache.spark.Aggregator
 
productIterator() - Static method in class org.apache.spark.CleanAccum
 
productIterator() - Static method in class org.apache.spark.CleanBroadcast
 
productIterator() - Static method in class org.apache.spark.CleanCheckpoint
 
productIterator() - Static method in class org.apache.spark.CleanRDD
 
productIterator() - Static method in class org.apache.spark.CleanShuffle
 
productIterator() - Static method in class org.apache.spark.ExceptionFailure
 
productIterator() - Static method in class org.apache.spark.ExecutorLostFailure
 
productIterator() - Static method in class org.apache.spark.ExecutorRegistered
 
productIterator() - Static method in class org.apache.spark.ExecutorRemoved
 
productIterator() - Static method in class org.apache.spark.ExpireDeadHosts
 
productIterator() - Static method in class org.apache.spark.FetchFailed
 
productIterator() - Static method in class org.apache.spark.graphx.Edge
 
productIterator() - Static method in class org.apache.spark.ml.feature.Dot
 
productIterator() - Static method in class org.apache.spark.ml.feature.LabeledPoint
 
productIterator() - Static method in class org.apache.spark.ml.param.ParamPair
 
productIterator() - Static method in class org.apache.spark.mllib.feature.VocabWord
 
productIterator() - Static method in class org.apache.spark.mllib.linalg.distributed.IndexedRow
 
productIterator() - Static method in class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
productIterator() - Static method in class org.apache.spark.mllib.linalg.QRDecomposition
 
productIterator() - Static method in class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
productIterator() - Static method in class org.apache.spark.mllib.recommendation.Rating
 
productIterator() - Static method in class org.apache.spark.mllib.regression.LabeledPoint
 
productIterator() - Static method in class org.apache.spark.mllib.stat.test.BinarySample
 
productIterator() - Static method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
productIterator() - Static method in class org.apache.spark.mllib.tree.model.Split
 
productIterator() - Static method in class org.apache.spark.Resubmitted
 
productIterator() - Static method in class org.apache.spark.rpc.netty.OnStart
 
productIterator() - Static method in class org.apache.spark.rpc.netty.OnStop
 
productIterator() - Static method in class org.apache.spark.scheduler.AccumulableInfo
 
productIterator() - Static method in class org.apache.spark.scheduler.AllJobsCancelled
 
productIterator() - Static method in class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
productIterator() - Static method in class org.apache.spark.scheduler.JobSucceeded
 
productIterator() - Static method in class org.apache.spark.scheduler.local.KillTask
 
productIterator() - Static method in class org.apache.spark.scheduler.local.ReviveOffers
 
productIterator() - Static method in class org.apache.spark.scheduler.local.StatusUpdate
 
productIterator() - Static method in class org.apache.spark.scheduler.local.StopExecutor
 
productIterator() - Static method in class org.apache.spark.scheduler.ResubmitFailedStages
 
productIterator() - Static method in class org.apache.spark.scheduler.RuntimePercentage
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerApplicationEnd
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerBlockUpdated
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerJobStart
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerStageCompleted
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerTaskGettingResult
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
productIterator() - Static method in class org.apache.spark.scheduler.SparkListenerUnpersistRDD
 
productIterator() - Static method in class org.apache.spark.scheduler.StopCoordinator
 
productIterator() - Static method in class org.apache.spark.sql.DatasetHolder
 
productIterator() - Static method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
productIterator() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
productIterator() - Static method in class org.apache.spark.sql.internal.HiveSerDe
 
productIterator() - Static method in class org.apache.spark.sql.jdbc.JdbcType
 
productIterator() - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
productIterator() - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
productIterator() - Static method in class org.apache.spark.sql.ProcessingTime
 
productIterator() - Static method in class org.apache.spark.sql.sources.And
 
productIterator() - Static method in class org.apache.spark.sql.sources.EqualNullSafe
 
productIterator() - Static method in class org.apache.spark.sql.sources.EqualTo
 
productIterator() - Static method in class org.apache.spark.sql.sources.GreaterThan
 
productIterator() - Static method in class org.apache.spark.sql.sources.GreaterThanOrEqual
 
productIterator() - Static method in class org.apache.spark.sql.sources.In
 
productIterator() - Static method in class org.apache.spark.sql.sources.IsNotNull
 
productIterator() - Static method in class org.apache.spark.sql.sources.IsNull
 
productIterator() - Static method in class org.apache.spark.sql.sources.LessThan
 
productIterator() - Static method in class org.apache.spark.sql.sources.LessThanOrEqual
 
productIterator() - Static method in class org.apache.spark.sql.sources.Not
 
productIterator() - Static method in class org.apache.spark.sql.sources.Or
 
productIterator() - Static method in class org.apache.spark.sql.sources.StringContains
 
productIterator() - Static method in class org.apache.spark.sql.sources.StringEndsWith
 
productIterator() - Static method in class org.apache.spark.sql.sources.StringStartsWith
 
productIterator() - Static method in class org.apache.spark.sql.types.ArrayType
 
productIterator() - Static method in class org.apache.spark.sql.types.DecimalType
 
productIterator() - Static method in class org.apache.spark.sql.types.MapType
 
productIterator() - Static method in class org.apache.spark.sql.types.StructField
 
productIterator() - Static method in class org.apache.spark.sql.types.StructType
 
productIterator() - Static method in class org.apache.spark.StopMapOutputTracker
 
productIterator() - Static method in class org.apache.spark.storage.BlockStatus
 
productIterator() - Static method in class org.apache.spark.storage.BlockUpdatedInfo
 
productIterator() - Static method in class org.apache.spark.storage.BroadcastBlockId
 
productIterator() - Static method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
productIterator() - Static method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
productIterator() - Static method in class org.apache.spark.storage.RDDBlockId
 
productIterator() - Static method in class org.apache.spark.storage.ShuffleBlockId
 
productIterator() - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
productIterator() - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
productIterator() - Static method in class org.apache.spark.storage.StreamBlockId
 
productIterator() - Static method in class org.apache.spark.storage.TaskResultBlockId
 
productIterator() - Static method in class org.apache.spark.streaming.Duration
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.AllReceiverIds
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.BatchInfo
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.GetAllReceiverInfo
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StopAllReceivers
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchCompleted
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchStarted
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationCompleted
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationStarted
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverError
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStarted
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStopped
 
productIterator() - Static method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
productIterator() - Static method in class org.apache.spark.streaming.Time
 
productIterator() - Static method in class org.apache.spark.Success
 
productIterator() - Static method in class org.apache.spark.TaskCommitDenied
 
productIterator() - Static method in class org.apache.spark.TaskKilled
 
productIterator() - Static method in class org.apache.spark.TaskResultLost
 
productIterator() - Static method in class org.apache.spark.TaskSchedulerIsSet
 
productIterator() - Static method in class org.apache.spark.UnknownReason
 
productIterator() - Static method in class org.apache.spark.util.MethodIdentifier
 
productIterator() - Static method in class org.apache.spark.util.MutablePair
 
productPrefix() - Static method in class org.apache.spark.Aggregator
 
productPrefix() - Static method in class org.apache.spark.CleanAccum
 
productPrefix() - Static method in class org.apache.spark.CleanBroadcast
 
productPrefix() - Static method in class org.apache.spark.CleanCheckpoint
 
productPrefix() - Static method in class org.apache.spark.CleanRDD
 
productPrefix() - Static method in class org.apache.spark.CleanShuffle
 
productPrefix() - Static method in class org.apache.spark.ExceptionFailure
 
productPrefix() - Static method in class org.apache.spark.ExecutorLostFailure
 
productPrefix() - Static method in class org.apache.spark.ExecutorRegistered
 
productPrefix() - Static method in class org.apache.spark.ExecutorRemoved
 
productPrefix() - Static method in class org.apache.spark.ExpireDeadHosts
 
productPrefix() - Static method in class org.apache.spark.FetchFailed
 
productPrefix() - Static method in class org.apache.spark.graphx.Edge
 
productPrefix() - Static method in class org.apache.spark.ml.feature.Dot
 
productPrefix() - Static method in class org.apache.spark.ml.feature.LabeledPoint
 
productPrefix() - Static method in class org.apache.spark.ml.param.ParamPair
 
productPrefix() - Static method in class org.apache.spark.mllib.feature.VocabWord
 
productPrefix() - Static method in class org.apache.spark.mllib.linalg.distributed.IndexedRow
 
productPrefix() - Static method in class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
productPrefix() - Static method in class org.apache.spark.mllib.linalg.QRDecomposition
 
productPrefix() - Static method in class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
productPrefix() - Static method in class org.apache.spark.mllib.recommendation.Rating
 
productPrefix() - Static method in class org.apache.spark.mllib.regression.LabeledPoint
 
productPrefix() - Static method in class org.apache.spark.mllib.stat.test.BinarySample
 
productPrefix() - Static method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
productPrefix() - Static method in class org.apache.spark.mllib.tree.model.Split
 
productPrefix() - Static method in class org.apache.spark.Resubmitted
 
productPrefix() - Static method in class org.apache.spark.rpc.netty.OnStart
 
productPrefix() - Static method in class org.apache.spark.rpc.netty.OnStop
 
productPrefix() - Static method in class org.apache.spark.scheduler.AccumulableInfo
 
productPrefix() - Static method in class org.apache.spark.scheduler.AllJobsCancelled
 
productPrefix() - Static method in class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
productPrefix() - Static method in class org.apache.spark.scheduler.JobSucceeded
 
productPrefix() - Static method in class org.apache.spark.scheduler.local.KillTask
 
productPrefix() - Static method in class org.apache.spark.scheduler.local.ReviveOffers
 
productPrefix() - Static method in class org.apache.spark.scheduler.local.StatusUpdate
 
productPrefix() - Static method in class org.apache.spark.scheduler.local.StopExecutor
 
productPrefix() - Static method in class org.apache.spark.scheduler.ResubmitFailedStages
 
productPrefix() - Static method in class org.apache.spark.scheduler.RuntimePercentage
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerApplicationEnd
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerBlockUpdated
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerJobStart
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerStageCompleted
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerTaskGettingResult
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
productPrefix() - Static method in class org.apache.spark.scheduler.SparkListenerUnpersistRDD
 
productPrefix() - Static method in class org.apache.spark.scheduler.StopCoordinator
 
productPrefix() - Static method in class org.apache.spark.sql.DatasetHolder
 
productPrefix() - Static method in class org.apache.spark.sql.expressions.UserDefinedFunction
 
productPrefix() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
productPrefix() - Static method in class org.apache.spark.sql.internal.HiveSerDe
 
productPrefix() - Static method in class org.apache.spark.sql.jdbc.JdbcType
 
productPrefix() - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
productPrefix() - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
productPrefix() - Static method in class org.apache.spark.sql.ProcessingTime
 
productPrefix() - Static method in class org.apache.spark.sql.sources.And
 
productPrefix() - Static method in class org.apache.spark.sql.sources.EqualNullSafe
 
productPrefix() - Static method in class org.apache.spark.sql.sources.EqualTo
 
productPrefix() - Static method in class org.apache.spark.sql.sources.GreaterThan
 
productPrefix() - Static method in class org.apache.spark.sql.sources.GreaterThanOrEqual
 
productPrefix() - Static method in class org.apache.spark.sql.sources.In
 
productPrefix() - Static method in class org.apache.spark.sql.sources.IsNotNull
 
productPrefix() - Static method in class org.apache.spark.sql.sources.IsNull
 
productPrefix() - Static method in class org.apache.spark.sql.sources.LessThan
 
productPrefix() - Static method in class org.apache.spark.sql.sources.LessThanOrEqual
 
productPrefix() - Static method in class org.apache.spark.sql.sources.Not
 
productPrefix() - Static method in class org.apache.spark.sql.sources.Or
 
productPrefix() - Static method in class org.apache.spark.sql.sources.StringContains
 
productPrefix() - Static method in class org.apache.spark.sql.sources.StringEndsWith
 
productPrefix() - Static method in class org.apache.spark.sql.sources.StringStartsWith
 
productPrefix() - Static method in class org.apache.spark.sql.types.ArrayType
 
productPrefix() - Static method in class org.apache.spark.sql.types.DecimalType
 
productPrefix() - Static method in class org.apache.spark.sql.types.MapType
 
productPrefix() - Static method in class org.apache.spark.sql.types.StructField
 
productPrefix() - Static method in class org.apache.spark.sql.types.StructType
 
productPrefix() - Static method in class org.apache.spark.StopMapOutputTracker
 
productPrefix() - Static method in class org.apache.spark.storage.BlockStatus
 
productPrefix() - Static method in class org.apache.spark.storage.BlockUpdatedInfo
 
productPrefix() - Static method in class org.apache.spark.storage.BroadcastBlockId
 
productPrefix() - Static method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
productPrefix() - Static method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
productPrefix() - Static method in class org.apache.spark.storage.RDDBlockId
 
productPrefix() - Static method in class org.apache.spark.storage.ShuffleBlockId
 
productPrefix() - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
productPrefix() - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
productPrefix() - Static method in class org.apache.spark.storage.StreamBlockId
 
productPrefix() - Static method in class org.apache.spark.storage.TaskResultBlockId
 
productPrefix() - Static method in class org.apache.spark.streaming.Duration
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.AllReceiverIds
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.BatchInfo
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.GetAllReceiverInfo
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StopAllReceivers
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchCompleted
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchStarted
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationCompleted
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationStarted
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverError
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStarted
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStopped
 
productPrefix() - Static method in class org.apache.spark.streaming.scheduler.StreamInputInfo
 
productPrefix() - Static method in class org.apache.spark.streaming.Time
 
productPrefix() - Static method in class org.apache.spark.Success
 
productPrefix() - Static method in class org.apache.spark.TaskCommitDenied
 
productPrefix() - Static method in class org.apache.spark.TaskKilled
 
productPrefix() - Static method in class org.apache.spark.TaskResultLost
 
productPrefix() - Static method in class org.apache.spark.TaskSchedulerIsSet
 
productPrefix() - Static method in class org.apache.spark.UnknownReason
 
productPrefix() - Static method in class org.apache.spark.util.MethodIdentifier
 
productPrefix() - Static method in class org.apache.spark.util.MutablePair
 
project(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Binomial$
 
project(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gamma$
 
project(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gaussian$
 
project(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Poisson$
 
properties() - Method in class org.apache.spark.scheduler.SparkListenerJobStart
 
properties() - Method in class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
propertiesFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
propertiesToJson(Properties) - Static method in class org.apache.spark.util.JsonProtocol
 
proxyBase() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.AddWebUIFilter
 
PrunedFilteredScan - Interface in org.apache.spark.sql.sources
::DeveloperApi:: A BaseRelation that can eliminate unneeded columns and filter using selected predicates before producing an RDD containing all matching tuples as Row objects.
PrunedScan - Interface in org.apache.spark.sql.sources
::DeveloperApi:: A BaseRelation that can eliminate unneeded columns before producing an RDD containing all of its tuples as Row objects.
Pseudorandom - Interface in org.apache.spark.util.random
:: DeveloperApi :: A class with pseudorandom behavior.
put(Object) - Static method in class org.apache.spark.api.r.JVMObjectTracker
 
put(ParamPair<?>...) - Method in class org.apache.spark.ml.param.ParamMap
Puts a list of param pairs (overwrites if the input params exist).
put(Param<T>, T) - Method in class org.apache.spark.ml.param.ParamMap
Puts a (param, value) pair (overwrites if the input param exists).
put(Seq<ParamPair<?>>) - Method in class org.apache.spark.ml.param.ParamMap
Puts a list of param pairs (overwrites if the input params exist).
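As a brief sketch of how the ParamMap.put overloads above compose (the LogisticRegression estimator and the chosen param values are used only for illustration):

    import org.apache.spark.ml.classification.LogisticRegression
    import org.apache.spark.ml.param.ParamMap

    val lr = new LogisticRegression()
    val paramMap = ParamMap(lr.maxIter -> 10)
    paramMap.put(lr.regParam, 0.01)                           // single (param, value) pair
    paramMap.put(lr.maxIter -> 20, lr.fitIntercept -> true)   // varargs of ParamPair; maxIter is overwritten
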
put(Object) - Method in class org.apache.spark.util.sketch.BloomFilter
Puts an item into this BloomFilter.
putBinary(byte[]) - Method in class org.apache.spark.util.sketch.BloomFilter
A specialized variant of BloomFilter.put(Object) that only supports byte array items.
putBoolean(String, boolean) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a Boolean.
putBooleanArray(String, boolean[]) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a Boolean array.
putDouble(String, double) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a Double.
putDoubleArray(String, double[]) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a Double array.
putLong(String, long) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a Long.
putLong(long) - Method in class org.apache.spark.util.sketch.BloomFilter
A specialized variant of BloomFilter.put(Object) that only supports long items.
putLongArray(String, long[]) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a Long array.
putMetadata(String, Metadata) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a Metadata.
putMetadataArray(String, Metadata[]) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a Metadata array.
putNull(String) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a null.
putString(String, String) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a String.
putString(String) - Method in class org.apache.spark.util.sketch.BloomFilter
A specialized variant of BloomFilter.put(Object) that only supports String items.
putStringArray(String, String[]) - Method in class org.apache.spark.sql.types.MetadataBuilder
Puts a String array.
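For illustration, a small sketch of chaining the MetadataBuilder put* methods above (the key names are made up):

    import org.apache.spark.sql.types.MetadataBuilder

    val metadata = new MetadataBuilder()
      .putString("comment", "age of the user in years")   // hypothetical keys and values
      .putLong("min", 0L)
      .putBooleanArray("flags", Array(true, false))
      .build()
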
pValue() - Method in class org.apache.spark.mllib.stat.test.ChiSqTestResult
 
pValue() - Method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTestResult
 
pValue() - Method in interface org.apache.spark.mllib.stat.test.TestResult
The probability of obtaining a test statistic result at least as extreme as the one that was actually observed, assuming that the null hypothesis is true.
pValues() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionTrainingSummary
Two-sided p-value of estimated coefficients and intercept.
pValues() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Two-sided p-value of estimated coefficients and intercept.
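The pValue entries above can be exercised, for instance, through a chi-squared test; a small sketch with made-up observed counts:

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.stat.Statistics

    val observed = Vectors.dense(10.0, 20.0, 30.0)   // hypothetical category counts
    val chiSq = Statistics.chiSqTest(observed)       // goodness-of-fit against a uniform expectation
    println(s"p-value = ${chiSq.pValue}")            // a small p-value is evidence against the null hypothesis
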
pyUDT() - Method in class org.apache.spark.mllib.linalg.VectorUDT
 

Q

Q() - Method in class org.apache.spark.mllib.linalg.QRDecomposition
 
QRDecomposition<QType,RType> - Class in org.apache.spark.mllib.linalg
:: Experimental :: Represents QR factors.
QRDecomposition(QType, RType) - Constructor for class org.apache.spark.mllib.linalg.QRDecomposition
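A typical way to obtain a QRDecomposition is via RowMatrix.tallSkinnyQR; a brief sketch, assuming mat is an existing tall-and-skinny RowMatrix:

    import org.apache.spark.mllib.linalg.distributed.RowMatrix

    // `mat` is assumed to already exist
    val qr = mat.tallSkinnyQR(computeQ = true)
    val q = qr.Q   // RowMatrix with orthonormal columns
    val r = qr.R   // upper-triangular local Matrix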
 
quantileCalculationStrategy() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
QuantileDiscretizer - Class in org.apache.spark.ml.feature
:: Experimental :: QuantileDiscretizer takes a column with continuous features and outputs a column with binned categorical features.
QuantileDiscretizer(String) - Constructor for class org.apache.spark.ml.feature.QuantileDiscretizer
 
QuantileDiscretizer() - Constructor for class org.apache.spark.ml.feature.QuantileDiscretizer
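A small usage sketch, assuming a DataFrame df with a continuous column named "hour" (both are hypothetical):

    import org.apache.spark.ml.feature.QuantileDiscretizer

    val discretizer = new QuantileDiscretizer()
      .setInputCol("hour")
      .setOutputCol("hourBucket")
      .setNumBuckets(3)
    val bucketed = discretizer.fit(df).transform(df)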
 
quantileProbabilities() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
quantileProbabilities() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
quantiles() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
quantilesCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
quantilesCol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
QuantileStrategy - Class in org.apache.spark.mllib.tree.configuration
Enum for selecting the quantile calculation strategy.
QuantileStrategy() - Constructor for class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
quarter(Column) - Static method in class org.apache.spark.sql.functions
Extracts the quarter as an integer from a given date/timestamp/string.
query() - Method in exception org.apache.spark.sql.ContinuousQueryException
 
query() - Method in class org.apache.spark.sql.util.ContinuousQueryListener.QueryProgress
 
query() - Method in class org.apache.spark.sql.util.ContinuousQueryListener.QueryStarted
 
query() - Method in class org.apache.spark.sql.util.ContinuousQueryListener.QueryTerminated
 
queryExecution() - Method in class org.apache.spark.sql.Dataset
 
queryExecution() - Method in class org.apache.spark.sql.KeyValueGroupedDataset
 
QueryExecutionListener - Interface in org.apache.spark.sql.util
:: Experimental :: The interface for a query execution listener that can be used to analyze execution metrics.
queryName(String) - Method in class org.apache.spark.sql.DataFrameWriter
:: Experimental :: Specifies the name of the ContinuousQuery that can be started with startStream().
queueStream(Queue<JavaRDD<T>>) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream from a queue of RDDs.
queueStream(Queue<JavaRDD<T>>, boolean) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream from a queue of RDDs.
queueStream(Queue<JavaRDD<T>>, boolean, JavaRDD<T>) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream from a queue of RDDs.
queueStream(Queue<RDD<T>>, boolean, ClassTag<T>) - Method in class org.apache.spark.streaming.StreamingContext
Create an input stream from a queue of RDDs.
queueStream(Queue<RDD<T>>, boolean, RDD<T>, ClassTag<T>) - Method in class org.apache.spark.streaming.StreamingContext
Create an input stream from a queue of RDDs.
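A minimal queueStream sketch for testing, assuming an existing StreamingContext ssc; with oneAtATime set to true, each queued RDD is consumed as one batch:

    import scala.collection.mutable
    import org.apache.spark.rdd.RDD

    val rddQueue = mutable.Queue[RDD[Int]]()
    val inputStream = ssc.queueStream(rddQueue, oneAtATime = true)
    inputStream.map(_ * 2).print()
    rddQueue += ssc.sparkContext.makeRDD(1 to 100, numSlices = 4)
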
quot(Decimal, Decimal) - Method in class org.apache.spark.sql.types.Decimal.DecimalAsIfIntegral$
 
quoteIdentifier(String) - Static method in class org.apache.spark.sql.jdbc.DB2Dialect
 
quoteIdentifier(String) - Static method in class org.apache.spark.sql.jdbc.DerbyDialect
 
quoteIdentifier(String) - Method in class org.apache.spark.sql.jdbc.JdbcDialect
Quotes the identifier.
quoteIdentifier(String) - Static method in class org.apache.spark.sql.jdbc.MsSqlServerDialect
 
quoteIdentifier(String) - Static method in class org.apache.spark.sql.jdbc.MySQLDialect
 
quoteIdentifier(String) - Static method in class org.apache.spark.sql.jdbc.NoopDialect
 
quoteIdentifier(String) - Static method in class org.apache.spark.sql.jdbc.OracleDialect
 
quoteIdentifier(String) - Static method in class org.apache.spark.sql.jdbc.PostgresDialect
 

R

R() - Method in class org.apache.spark.mllib.linalg.QRDecomposition
 
r2() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Returns R^2, the coefficient of determination.
r2() - Method in class org.apache.spark.mllib.evaluation.RegressionMetrics
Returns R^2, the unadjusted coefficient of determination.
RACK_LOCAL() - Static method in class org.apache.spark.scheduler.TaskLocality
 
rand(int, int, Random) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
Generate a DenseMatrix consisting of i.i.d. uniform random numbers.
rand(int, int, Random) - Static method in class org.apache.spark.ml.linalg.Matrices
Generate a DenseMatrix consisting of i.i.d. uniform random numbers.
rand(int, int, Random) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
Generate a DenseMatrix consisting of i.i.d. uniform random numbers.
rand(int, int, Random) - Static method in class org.apache.spark.mllib.linalg.Matrices
Generate a DenseMatrix consisting of i.i.d. uniform random numbers.
rand(long) - Static method in class org.apache.spark.sql.functions
Generate a random column with i.i.d. samples from U[0.0, 1.0].
rand() - Static method in class org.apache.spark.sql.functions
Generate a random column with i.i.d. samples from U[0.0, 1.0].
randn(int, int, Random) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
Generate a DenseMatrix consisting of i.i.d. gaussian random numbers.
randn(int, int, Random) - Static method in class org.apache.spark.ml.linalg.Matrices
Generate a DenseMatrix consisting of i.i.d. gaussian random numbers.
randn(int, int, Random) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
Generate a DenseMatrix consisting of i.i.d. gaussian random numbers.
randn(int, int, Random) - Static method in class org.apache.spark.mllib.linalg.Matrices
Generate a DenseMatrix consisting of i.i.d. gaussian random numbers.
randn(long) - Static method in class org.apache.spark.sql.functions
Generate a column with i.i.d. samples from the standard normal distribution.
randn() - Static method in class org.apache.spark.sql.functions
Generate a column with i.i.d. samples from the standard normal distribution.
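As a sketch of the SQL rand and randn functions above (df is any existing DataFrame); fixing the seed makes the columns reproducible:

    import org.apache.spark.sql.functions.{rand, randn}

    val withNoise = df
      .withColumn("u", rand(seed = 42L))    // uniform samples in [0.0, 1.0)
      .withColumn("z", randn(seed = 42L))   // samples from the standard normal distribution
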
RANDOM() - Static method in class org.apache.spark.mllib.clustering.KMeans
 
random() - Static method in class org.apache.spark.util.Utils
 
RandomDataGenerator<T> - Interface in org.apache.spark.mllib.random
:: DeveloperApi :: Trait for random data generators that generate i.i.d. values.
RandomForest - Class in org.apache.spark.ml.tree.impl
ALGORITHM
RandomForest() - Constructor for class org.apache.spark.ml.tree.impl.RandomForest
 
RandomForest - Class in org.apache.spark.mllib.tree
A class that implements a Random Forest learning algorithm for classification and regression.
RandomForest(Strategy, int, String, int) - Constructor for class org.apache.spark.mllib.tree.RandomForest
 
RandomForestClassificationModel - Class in org.apache.spark.ml.classification
:: Experimental :: Random Forest model for classification.
RandomForestClassifier - Class in org.apache.spark.ml.classification
:: Experimental :: Random Forest learning algorithm for classification.
RandomForestClassifier(String) - Constructor for class org.apache.spark.ml.classification.RandomForestClassifier
 
RandomForestClassifier() - Constructor for class org.apache.spark.ml.classification.RandomForestClassifier
 
RandomForestModel - Class in org.apache.spark.mllib.tree.model
Represents a random forest model.
RandomForestModel(Enumeration.Value, DecisionTreeModel[]) - Constructor for class org.apache.spark.mllib.tree.model.RandomForestModel
 
RandomForestRegressionModel - Class in org.apache.spark.ml.regression
:: Experimental :: Random Forest model for regression.
RandomForestRegressor - Class in org.apache.spark.ml.regression
:: Experimental :: Random Forest learning algorithm for regression.
RandomForestRegressor(String) - Constructor for class org.apache.spark.ml.regression.RandomForestRegressor
 
RandomForestRegressor() - Constructor for class org.apache.spark.ml.regression.RandomForestRegressor
 
randomize(TraversableOnce<T>, ClassTag<T>) - Static method in class org.apache.spark.util.Utils
Shuffle the elements of a collection into a random order, returning the result in a new collection.
randomizeInPlace(Object, Random) - Static method in class org.apache.spark.util.Utils
Shuffle the elements of an array into a random order, modifying the original array.
randomJavaRDD(JavaSparkContext, RandomDataGenerator<T>, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
:: DeveloperApi :: Generates an RDD comprised of i.i.d. samples produced by the input RandomDataGenerator.
randomJavaRDD(JavaSparkContext, RandomDataGenerator<T>, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
randomJavaRDD(JavaSparkContext, RandomDataGenerator<T>, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
randomJavaVectorRDD(JavaSparkContext, RandomDataGenerator<Object>, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
randomJavaVectorRDD(JavaSparkContext, RandomDataGenerator<Object>, long, int, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
randomJavaVectorRDD(JavaSparkContext, RandomDataGenerator<Object>, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
randomRDD(SparkContext, RandomDataGenerator<T>, long, int, long, ClassTag<T>) - Static method in class org.apache.spark.mllib.random.RandomRDDs
:: DeveloperApi :: Generates an RDD comprised of i.i.d. samples produced by the input RandomDataGenerator.
RandomRDDs - Class in org.apache.spark.mllib.random
Generator methods for creating RDDs comprised of i.i.d. samples from some distribution.
RandomRDDs() - Constructor for class org.apache.spark.mllib.random.RandomRDDs
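A brief sketch of the generic randomRDD entry point, assuming an active SparkContext sc and using a Poisson generator purely as an example:

    import org.apache.spark.mllib.random.{PoissonGenerator, RandomRDDs}

    // 100,000 i.i.d. samples from Poisson(mean = 3.0), spread over 4 partitions
    val poissonData = RandomRDDs.randomRDD(sc, new PoissonGenerator(3.0), 100000L, 4)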
 
RandomSampler<T,U> - Interface in org.apache.spark.util.random
:: DeveloperApi :: A pseudorandom sampler.
randomSplit(double[]) - Method in class org.apache.spark.api.java.JavaRDD
Randomly splits this RDD with the provided weights.
randomSplit(double[], long) - Method in class org.apache.spark.api.java.JavaRDD
Randomly splits this RDD with the provided weights.
randomSplit(double[], long) - Static method in class org.apache.spark.api.r.RRDD
 
randomSplit(double[], long) - Static method in class org.apache.spark.graphx.EdgeRDD
 
randomSplit(double[], long) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
randomSplit(double[], long) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
randomSplit(double[], long) - Static method in class org.apache.spark.graphx.VertexRDD
 
randomSplit(double[], long) - Static method in class org.apache.spark.rdd.HadoopRDD
 
randomSplit(double[], long) - Static method in class org.apache.spark.rdd.JdbcRDD
 
randomSplit(double[], long) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
randomSplit(double[], long) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
randomSplit(double[], long) - Method in class org.apache.spark.rdd.RDD
Randomly splits this RDD with the provided weights.
randomSplit(double[], long) - Method in class org.apache.spark.sql.Dataset
Randomly splits this Dataset with the provided weights.
randomSplit(double[]) - Method in class org.apache.spark.sql.Dataset
Randomly splits this Dataset with the provided weights.
randomSplit$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
randomSplit$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
randomSplit$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
randomSplit$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
randomSplit$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
randomSplit$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
randomSplit$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
randomSplit$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
randomSplit$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
randomSplitAsList(double[], long) - Method in class org.apache.spark.sql.Dataset
Returns a Java list that contains the randomly split Datasets with the provided weights.
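For example, a common train/test split on a Dataset (df is assumed to exist); fixing the seed makes the split reproducible:

    val Array(train, test) = df.randomSplit(Array(0.8, 0.2), seed = 12345L)
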
randomVectorRDD(SparkContext, RandomDataGenerator<Object>, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
:: DeveloperApi :: Generates an RDD[Vector] with vectors containing i.i.d. samples produced by the input RandomDataGenerator.
range(long, long, long, int) - Method in class org.apache.spark.SparkContext
Creates a new RDD[Long] containing elements from start to end (exclusive), increased by step for each element.
range(long) - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: Creates a Dataset with a single LongType column named id, containing elements in a range from 0 to end (exclusive) with step value 1.
range(long, long) - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: Creates a Dataset with a single LongType column named id, containing elements in a range from start to end (exclusive) with step value 1.
range(long, long, long) - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: Creates a Dataset with a single LongType column named id, containing elements in a range from start to end (exclusive) with a step value.
range(long, long, long, int) - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: Creates a Dataset with a single LongType column named id, containing elements in a range from start to end (exclusive) with a step value, with the number of partitions specified.
range(long) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Creates a Dataset with a single LongType column named id, containing elements in a range from 0 to end (exclusive) with step value 1.
range(long, long) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Creates a Dataset with a single LongType column named id, containing elements in a range from start to end (exclusive) with step value 1.
range(long, long, long) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Creates a Dataset with a single LongType column named id, containing elements in a range from start to end (exclusive) with a step value.
range(long, long, long, int) - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Creates a Dataset with a single LongType column named id, containing elements in a range from start to end (exclusive) with a step value, with the number of partitions specified.
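A quick range sketch, assuming an active SparkSession spark:

    // ids 0, 2, 4, ..., 98 as a single LongType column named "id", in 4 partitions
    val ids = spark.range(0, 100, 2, 4)
    ids.show(5)
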
rangeBetween(long, long) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the frame boundaries, from start (inclusive) to end (inclusive).
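A short sketch of a value-based frame with rangeBetween, assuming a DataFrame df with hypothetical "dept" and "salary" columns:

    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions.sum

    // per row: sum salaries of same-department rows whose salary is within 1000 below the current row's
    val w = Window.partitionBy("dept").orderBy("salary").rangeBetween(-1000L, 0L)
    val withNearbyTotal = df.withColumn("nearbyTotal", sum("salary").over(w))
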
RangeDependency<T> - Class in org.apache.spark
:: DeveloperApi :: Represents a one-to-one dependency between ranges of partitions in the parent and child RDDs.
RangeDependency(RDD<T>, int, int, int) - Constructor for class org.apache.spark.RangeDependency
 
RangePartitioner<K,V> - Class in org.apache.spark
A Partitioner that partitions sortable records by range into roughly equal ranges.
RangePartitioner(int, RDD<? extends Product2<K, V>>, boolean, Ordering<K>, ClassTag<K>) - Constructor for class org.apache.spark.RangePartitioner
 
rank() - Method in class org.apache.spark.graphx.lib.SVDPlusPlus.Conf
 
rank() - Static method in class org.apache.spark.ml.recommendation.ALS
 
rank() - Method in class org.apache.spark.ml.recommendation.ALSModel
 
rank() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
The numeric rank of the fitted linear model.
rank() - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
rank() - Static method in class org.apache.spark.sql.functions
Window function: returns the rank of rows within a window partition.
RankingMetrics<T> - Class in org.apache.spark.mllib.evaluation
::Experimental:: Evaluator for ranking algorithms.
RankingMetrics(RDD<Tuple2<Object, Object>>, ClassTag<T>) - Constructor for class org.apache.spark.mllib.evaluation.RankingMetrics
 
rateController() - Method in class org.apache.spark.streaming.dstream.InputDStream
 
rateController() - Method in class org.apache.spark.streaming.dstream.ReceiverInputDStream
Asynchronously maintains & sends new rate limits to the receiver through the receiver tracker.
rating() - Method in class org.apache.spark.ml.recommendation.ALS.Rating
 
Rating - Class in org.apache.spark.mllib.recommendation
A more compact class to represent a rating than Tuple3[Int, Int, Double].
Rating(int, int, double) - Constructor for class org.apache.spark.mllib.recommendation.Rating
 
rating() - Method in class org.apache.spark.mllib.recommendation.Rating
 
ratingCol() - Static method in class org.apache.spark.ml.recommendation.ALS
 
raw2prediction(Vector) - Method in class org.apache.spark.ml.classification.ClassificationModel
Given a vector of raw predictions, select the predicted label.
raw2prediction(Vector) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
raw2prediction(Vector) - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
raw2prediction(Vector) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
raw2prediction(Vector) - Method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
raw2prediction(Vector) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
raw2probability(Vector) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
raw2probability(Vector) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
raw2probability(Vector) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
raw2probability(Vector) - Method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
Non-in-place version of raw2probabilityInPlace()
raw2probability(Vector) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
raw2probabilityInPlace(Vector) - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
raw2probabilityInPlace(Vector) - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
raw2probabilityInPlace(Vector) - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
raw2probabilityInPlace(Vector) - Method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
Estimate the probability of each class given the raw prediction, doing the computation in-place.
raw2probabilityInPlace(Vector) - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
rawPredictionCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
rawPredictionCol() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
rawPredictionCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
rawPredictionCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
rawPredictionCol() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
rawPredictionCol() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
rawPredictionCol() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
rawPredictionCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
rawPredictionCol() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
rawPredictionCol() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
rawSocketStream(String, int, StorageLevel) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream from network source hostname:port, where data is received as serialized blocks (serialized using Spark's serializer) that can be directly pushed into the block manager without deserializing them.
rawSocketStream(String, int) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream from network source hostname:port, where data is received as serialized blocks (serialized using Spark's serializer) that can be directly pushed into the block manager without deserializing them.
rawSocketStream(String, int, StorageLevel, ClassTag<T>) - Method in class org.apache.spark.streaming.StreamingContext
Create an input stream from network source hostname:port, where data is received as serialized blocks (serialized using Spark's serializer) that can be directly pushed into the block manager without deserializing them.
RawTextHelper - Class in org.apache.spark.streaming.util
 
RawTextHelper() - Constructor for class org.apache.spark.streaming.util.RawTextHelper
 
RawTextSender - Class in org.apache.spark.streaming.util
A helper program that sends blocks of Kryo-serialized text strings out on a socket at a specified rate.
RawTextSender() - Constructor for class org.apache.spark.streaming.util.RawTextSender
 
rdd() - Method in class org.apache.spark.api.java.JavaDoubleRDD
 
rdd() - Method in class org.apache.spark.api.java.JavaPairRDD
 
rdd() - Method in class org.apache.spark.api.java.JavaRDD
 
rdd() - Method in interface org.apache.spark.api.java.JavaRDDLike
 
rdd() - Method in class org.apache.spark.Dependency
 
rdd() - Method in class org.apache.spark.NarrowDependency
 
RDD<T> - Class in org.apache.spark.rdd
A Resilient Distributed Dataset (RDD), the basic abstraction in Spark.
RDD(SparkContext, Seq<Dependency<?>>, ClassTag<T>) - Constructor for class org.apache.spark.rdd.RDD
 
RDD(RDD<?>, ClassTag<T>) - Constructor for class org.apache.spark.rdd.RDD
Construct an RDD with just a one-to-one dependency on one parent
rdd() - Method in class org.apache.spark.ShuffleDependency
 
rdd() - Method in class org.apache.spark.sql.Dataset
Represents the content of the Dataset as an RDD of T.
RDD() - Static method in class org.apache.spark.storage.BlockId
 
RDDBlockId - Class in org.apache.spark.storage
 
RDDBlockId(int, int) - Constructor for class org.apache.spark.storage.RDDBlockId
 
rddBlocks() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
rddBlocks() - Method in class org.apache.spark.storage.StorageStatus
Return the RDD blocks stored in this block manager.
rddBlocksById(int) - Method in class org.apache.spark.storage.StorageStatus
Return the blocks that belong to the given RDD stored in this block manager.
RDDDataDistribution - Class in org.apache.spark.status.api.v1
 
RDDFunctions<T> - Class in org.apache.spark.mllib.rdd
Machine learning specific RDD functions.
RDDFunctions(RDD<T>, ClassTag<T>) - Constructor for class org.apache.spark.mllib.rdd.RDDFunctions
 
rddId() - Method in class org.apache.spark.CleanCheckpoint
 
rddId() - Method in class org.apache.spark.CleanRDD
 
rddId() - Method in class org.apache.spark.scheduler.SparkListenerUnpersistRDD
 
rddId() - Method in class org.apache.spark.storage.BlockManagerMessages.RemoveRdd
 
rddId() - Method in class org.apache.spark.storage.RDDBlockId
 
RDDInfo - Class in org.apache.spark.storage
 
RDDInfo(int, String, int, StorageLevel, Seq<Object>, String, Option<org.apache.spark.rdd.RDDOperationScope>) - Constructor for class org.apache.spark.storage.RDDInfo
 
rddInfoFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
rddInfoList() - Method in class org.apache.spark.ui.storage.StorageListener
Filter RDD info to include only those with cached partitions.
rddInfos() - Method in class org.apache.spark.scheduler.StageInfo
 
rddInfoToJson(RDDInfo) - Static method in class org.apache.spark.util.JsonProtocol
 
RDDPartitionInfo - Class in org.apache.spark.status.api.v1
 
rdds() - Method in class org.apache.spark.rdd.CoGroupedRDD
 
rdds() - Method in class org.apache.spark.rdd.UnionRDD
 
RDDStorageInfo - Class in org.apache.spark.status.api.v1
 
rddStorageLevel(int) - Method in class org.apache.spark.storage.StorageStatus
Return the storage level, if any, used by the given RDD in this block manager.
rddToAsyncRDDActions(RDD<T>, ClassTag<T>) - Static method in class org.apache.spark.rdd.RDD
 
rddToDatasetHolder(RDD<T>, Encoder<T>) - Method in class org.apache.spark.sql.SQLImplicits
Creates a Dataset from an RDD.
rddToOrderedRDDFunctions(RDD<Tuple2<K, V>>, Ordering<K>, ClassTag<K>, ClassTag<V>) - Static method in class org.apache.spark.rdd.RDD
 
rddToPairRDDFunctions(RDD<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>, Ordering<K>) - Static method in class org.apache.spark.rdd.RDD
 
rddToSequenceFileRDDFunctions(RDD<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>, <any>, <any>) - Static method in class org.apache.spark.rdd.RDD
 
read() - Method in class org.apache.spark.io.LZ4BlockInputStream
 
read(byte[], int, int) - Method in class org.apache.spark.io.LZ4BlockInputStream
 
read(byte[]) - Method in class org.apache.spark.io.LZ4BlockInputStream
 
read() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
read() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
read() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
read() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
read() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
read() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
read() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
read() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
read() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
read() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
read() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
read() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
read() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
read() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
read() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
read() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
read() - Static method in class org.apache.spark.ml.feature.IDFModel
 
read() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
read() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
read() - Static method in class org.apache.spark.ml.feature.PCAModel
 
read() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
read() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
read() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
read() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
read() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
read() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
read() - Static method in class org.apache.spark.ml.Pipeline
 
read() - Static method in class org.apache.spark.ml.PipelineModel
 
read() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
read() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
read() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
read() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
read() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
read() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
read() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
read() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
read() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
read() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
read() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
read() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
read() - Method in interface org.apache.spark.ml.util.MLReadable
Returns an MLReader instance for this class.
read(Kryo, Input, Class<Iterable<?>>) - Method in class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
read() - Method in class org.apache.spark.sql.SparkSession
:: Experimental :: Returns a DataFrameReader that can be used to read data and streams in as a DataFrame.
read() - Method in class org.apache.spark.sql.SQLContext
:: Experimental :: Returns a DataFrameReader that can be used to read data and streams in as a DataFrame.
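A minimal sketch of the read() entry point above, assuming an active SparkSession spark; the file path is illustrative only:

    val people = spark.read.json("data/people.json")
    people.printSchema()
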
read() - Method in class org.apache.spark.storage.BufferReleasingInputStream
 
read(byte[]) - Method in class org.apache.spark.storage.BufferReleasingInputStream
 
read(byte[], int, int) - Method in class org.apache.spark.storage.BufferReleasingInputStream
 
read(String) - Static method in class org.apache.spark.streaming.CheckpointReader
Read checkpoint files present in the given checkpoint directory.
read(String, SparkConf, Configuration, boolean) - Static method in class org.apache.spark.streaming.CheckpointReader
Read checkpoint files present in the given checkpoint directory.
read(WriteAheadLogRecordHandle) - Method in class org.apache.spark.streaming.util.WriteAheadLog
Read a written record based on the given record handle.
read() - Method in class org.apache.spark.util.io.ChunkedByteBufferInputStream
 
read(byte[], int, int) - Method in class org.apache.spark.util.io.ChunkedByteBufferInputStream
 
readAll() - Method in class org.apache.spark.streaming.util.WriteAheadLog
Read and return an iterator of all the records that have been written but not yet cleaned up.
readArray(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readBoolean(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readBooleanArr(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readBytes(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readBytes() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetricDistributions
 
readBytesArr(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readDate(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readDouble(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readDoubleArr(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readExternal(ObjectInput) - Method in class org.apache.spark.serializer.JavaSerializer
 
readExternal(ObjectInput) - Method in class org.apache.spark.storage.BlockManagerId
 
readExternal(ObjectInput) - Method in class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
 
readExternal(ObjectInput) - Method in class org.apache.spark.storage.StorageLevel
 
readExternal(ObjectInput) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
readExternal(ObjectInput) - Method in class org.apache.spark.streaming.flume.SparkFlumeEvent
 
readFrom(SparkConf) - Method in class org.apache.spark.internal.config.ConfigEntryWithDefault
 
readFrom(SparkConf) - Method in class org.apache.spark.internal.config.FallbackConfigEntry
 
readFrom(InputStream) - Static method in class org.apache.spark.util.sketch.BloomFilter
Reads in a BloomFilter from an input stream.
readFrom(InputStream) - Static method in class org.apache.spark.util.sketch.CountMinSketch
Reads in a CountMinSketch from an input stream.
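A round-trip sketch with BloomFilter (CountMinSketch follows the same writeTo/readFrom pattern); the item and sizing are made up:

    import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
    import org.apache.spark.util.sketch.BloomFilter

    val bf = BloomFilter.create(10000)       // expected number of distinct items
    bf.putString("spark")
    val out = new ByteArrayOutputStream()
    bf.writeTo(out)                          // serialize
    val restored = BloomFilter.readFrom(new ByteArrayInputStream(out.toByteArray))
    assert(restored.mightContainString("spark"))
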
readInt(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readIntArr(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readKey(ClassTag<T>) - Method in class org.apache.spark.serializer.DeserializationStream
Reads the object representing the key of a key-value pair.
readList(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readMap(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readObject(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readObject(ClassTag<T>) - Method in class org.apache.spark.serializer.DeserializationStream
The most general-purpose method to read an object.
readObjectType(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readRecords() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetricDistributions
 
readResolve() - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
readResolve() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
readResolve() - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
readResolve() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
readResolve() - Static method in class org.apache.spark.rdd.CheckpointState
 
readResolve() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
readResolve() - Static method in class org.apache.spark.scheduler.TaskLocality
 
readResolve() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
readResolve() - Static method in class org.apache.spark.TaskState
 
readSchema(Seq<String>, Option<Configuration>) - Static method in class org.apache.spark.sql.hive.orc.OrcFileOperator
 
readSqlObject(DataInputStream, char) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
readString(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readStringArr(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readStringBytes(DataInputStream, int) - Static method in class org.apache.spark.api.r.SerDe
 
readTime(DataInputStream) - Static method in class org.apache.spark.api.r.SerDe
 
readTypedObject(DataInputStream, char) - Static method in class org.apache.spark.api.r.SerDe
 
readValue(ClassTag<T>) - Method in class org.apache.spark.serializer.DeserializationStream
Reads the object representing the value of a key-value pair.
ready(Duration, CanAwait) - Method in class org.apache.spark.ComplexFutureAction
 
ready(Duration, CanAwait) - Method in interface org.apache.spark.FutureAction
Blocks until this action completes.
ready(Duration, CanAwait) - Method in class org.apache.spark.SimpleFutureAction
 
reason() - Method in class org.apache.spark.ExecutorLostFailure
 
reason() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RemoveExecutor
 
reason() - Method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
reason() - Method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
Recall - Class in org.apache.spark.mllib.evaluation.binary
Recall.
Recall() - Constructor for class org.apache.spark.mllib.evaluation.binary.Recall
 
recall(double) - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns recall for a given label (category).
recall() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Deprecated.
Use accuracy. Since 2.0.0.
recall() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns document-based recall averaged by the number of documents.
recall(double) - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns recall for a given label (category)
recallByThreshold() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
Returns a dataframe with two fields (threshold, recall) representing the recall curve.
recallByThreshold() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Returns the (threshold, recall) curve.
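Example (a minimal sketch; scoreAndLabels is an assumed RDD[(Double, Double)] of (score, label) pairs):

    import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics

    // Build binary-classification metrics and pull out the (threshold, recall) curve.
    val metrics = new BinaryClassificationMetrics(scoreAndLabels)
    val recallCurve = metrics.recallByThreshold()   // RDD[(Double, Double)]
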
Receiver<T> - Class in org.apache.spark.streaming.receiver
:: DeveloperApi :: Abstract class of a receiver that can be run on worker nodes to receive external data.
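Example (a hypothetical minimal receiver; a real implementation would poll an external source):

    import org.apache.spark.storage.StorageLevel
    import org.apache.spark.streaming.receiver.Receiver

    // Emits one "tick" per second until the receiver is stopped.
    class TickReceiver extends Receiver[String](StorageLevel.MEMORY_ONLY) {
      def onStart(): Unit = {
        new Thread("tick-receiver") {
          override def run(): Unit = {
            while (!isStopped()) { store("tick"); Thread.sleep(1000) }
          }
        }.start()
      }
      def onStop(): Unit = { }  // the polling thread exits once isStopped() is true
    }
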
Receiver(StorageLevel) - Constructor for class org.apache.spark.streaming.receiver.Receiver
 
RECEIVER_WAL_CLASS_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
RECEIVER_WAL_CLOSE_AFTER_WRITE_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
RECEIVER_WAL_ENABLE_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
RECEIVER_WAL_MAX_FAILURES_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
RECEIVER_WAL_ROLLING_INTERVAL_CONF_KEY() - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
ReceiverInfo - Class in org.apache.spark.streaming.scheduler
:: DeveloperApi :: Class having information about a receiver
ReceiverInfo(int, String, boolean, String, String, String, String, long) - Constructor for class org.apache.spark.streaming.scheduler.ReceiverInfo
 
receiverInfo() - Method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverError
 
receiverInfo() - Method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStarted
 
receiverInfo() - Method in class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStopped
 
receiverInputDStream() - Method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
receiverInputDStream() - Method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
ReceiverInputDStream<T> - Class in org.apache.spark.streaming.dstream
Abstract class for defining any InputDStream that has to start a receiver on worker nodes to receive external data.
ReceiverInputDStream(StreamingContext, ClassTag<T>) - Constructor for class org.apache.spark.streaming.dstream.ReceiverInputDStream
 
ReceiverState - Class in org.apache.spark.streaming.scheduler
Enumeration to identify current state of a Receiver
ReceiverState() - Constructor for class org.apache.spark.streaming.scheduler.ReceiverState
 
receiverStream(Receiver<T>) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream with any arbitrary user implemented receiver.
receiverStream(Receiver<T>, ClassTag<T>) - Method in class org.apache.spark.streaming.StreamingContext
Create an input stream with any arbitrary user implemented receiver.
recommendProducts(int, int) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
Recommends products to a user.
recommendProductsForUsers(int) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
Recommends top products for all users.
recommendUsers(int, int) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
Recommends users to a product.
recommendUsersForProducts(int) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
Recommends top users for all products.
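Example (a sketch; model is an assumed trained MatrixFactorizationModel, e.g. produced by ALS.train):

    // Top 5 product recommendations for user 42, and top 10 products per user for all users.
    val forOneUser  = model.recommendProducts(42, 5)        // Array[Rating]
    val forAllUsers = model.recommendProductsForUsers(10)   // RDD[(Int, Array[Rating])]
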
RECORDS_BETWEEN_BYTES_READ_METRIC_UPDATES() - Static method in class org.apache.spark.rdd.HadoopRDD
Update the input bytes read metric each time this number of records has been read
RECORDS_BETWEEN_BYTES_WRITTEN_METRIC_UPDATES() - Static method in class org.apache.spark.rdd.PairRDDFunctions
 
RECORDS_READ() - Method in class org.apache.spark.InternalAccumulator.input$
 
RECORDS_READ() - Method in class org.apache.spark.InternalAccumulator.shuffleRead$
 
RECORDS_WRITTEN() - Method in class org.apache.spark.InternalAccumulator.output$
 
RECORDS_WRITTEN() - Method in class org.apache.spark.InternalAccumulator.shuffleWrite$
 
recordsRead() - Method in class org.apache.spark.status.api.v1.InputMetricDistributions
 
recordsRead() - Method in class org.apache.spark.status.api.v1.InputMetrics
 
recordsRead() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetrics
 
recordsWritten() - Method in class org.apache.spark.status.api.v1.OutputMetricDistributions
 
recordsWritten() - Method in class org.apache.spark.status.api.v1.OutputMetrics
 
recordsWritten() - Method in class org.apache.spark.status.api.v1.ShuffleWriteMetrics
 
RedirectableOutputStream - Class in org.apache.spark.storage.memory
A wrapper which allows an open OutputStream to be redirected to a different sink.
RedirectableOutputStream() - Constructor for class org.apache.spark.storage.memory.RedirectableOutputStream
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
reduce(Function2<T, T, T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Reduces the elements of this RDD using the specified commutative and associative binary operator.
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.api.r.RRDD
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
reduce(Function2<T, T, T>) - Method in class org.apache.spark.rdd.RDD
Reduces the elements of this RDD using the specified commutative and associative binary operator.
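Example (a sketch; sc is an assumed SparkContext):

    // Sum 1..100 with a commutative, associative operator; yields 5050.
    val total = sc.parallelize(1 to 100).reduce(_ + _)
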
reduce(Function2<T, T, T>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Scala-specific) Reduces the elements of this Dataset using the specified binary function.
reduce(ReduceFunction<T>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: (Java-specific) Reduces the elements of this Dataset using the specified binary function.
reduce(BUF, IN) - Method in class org.apache.spark.sql.expressions.Aggregator
Combine two values to produce a new value.
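Example (a hypothetical typed aggregator; reduce folds one input value into the running buffer):

    import org.apache.spark.sql.{Encoder, Encoders}
    import org.apache.spark.sql.expressions.Aggregator

    object SumLong extends Aggregator[Long, Long, Long] {
      def zero: Long = 0L
      def reduce(buffer: Long, value: Long): Long = buffer + value   // fold one input into the buffer
      def merge(b1: Long, b2: Long): Long = b1 + b2                  // combine partial sums
      def finish(reduction: Long): Long = reduction
      def bufferEncoder: Encoder[Long] = Encoders.scalaLong
      def outputEncoder: Encoder[Long] = Encoders.scalaLong
    }
    // Usage on an assumed Dataset[Long]: ds.select(SumLong.toColumn)
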
reduce(Function2<A1, A1, A1>) - Static method in class org.apache.spark.sql.types.StructType
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
reduce(Function2<T, T, T>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD has a single element generated by reducing each RDD of this DStream.
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduce(Function2<T, T, T>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
reduce(Function2<T, T, T>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD has a single element generated by reducing each RDD of this DStream.
reduceByKey(Partitioner, Function2<V, V, V>) - Method in class org.apache.spark.api.java.JavaPairRDD
Merge the values for each key using an associative and commutative reduce function.
reduceByKey(Function2<V, V, V>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Merge the values for each key using an associative and commutative reduce function.
reduceByKey(Function2<V, V, V>) - Method in class org.apache.spark.api.java.JavaPairRDD
Merge the values for each key using an associative and commutative reduce function.
reduceByKey(Partitioner, Function2<V, V, V>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Merge the values for each key using an associative and commutative reduce function.
reduceByKey(Function2<V, V, V>, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
Merge the values for each key using an associative and commutative reduce function.
reduceByKey(Function2<V, V, V>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Merge the values for each key using an associative and commutative reduce function.
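Example (a word-count sketch; sc is an assumed SparkContext):

    val counts = sc.parallelize(Seq("a", "b", "a"))
      .map(word => (word, 1))
      .reduceByKey(_ + _)       // collect() => Array(("a", 2), ("b", 1))
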
reduceByKey(Function2<V, V, V>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying reduceByKey to each RDD.
reduceByKey(Function2<V, V, V>, int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying reduceByKey to each RDD.
reduceByKey(Function2<V, V, V>, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying reduceByKey to each RDD.
reduceByKey(Function2<V, V, V>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKey(Function2<V, V, V>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKey(Function2<V, V, V>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKey(Function2<V, V, V>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKey(Function2<V, V, V>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKey(Function2<V, V, V>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKey(Function2<V, V, V>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying reduceByKey to each RDD.
reduceByKey(Function2<V, V, V>, int) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying reduceByKey to each RDD.
reduceByKey(Function2<V, V, V>, Partitioner) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying reduceByKey to each RDD.
reduceByKeyAndWindow(Function2<V, V, V>, Duration) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Create a new DStream by applying reduceByKey over a sliding window on this DStream.
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration, int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying incremental reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration, int, Function<Tuple2<K, V>, Boolean>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying incremental reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration, Partitioner, Function<Tuple2<K, V>, Boolean>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying incremental reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration, int, Function<Tuple2<K, V>, Boolean>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration, Partitioner, Function<Tuple2<K, V>, Boolean>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration, int, Function<Tuple2<K, V>, Boolean>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration, Partitioner, Function<Tuple2<K, V>, Boolean>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByKeyAndWindow(Function2<V, V, V>, Duration) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying reduceByKey over a sliding window on this DStream.
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration, int) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Duration, Duration, Partitioner) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration, int, Function1<Tuple2<K, V>, Object>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying incremental reduceByKey over a sliding window.
reduceByKeyAndWindow(Function2<V, V, V>, Function2<V, V, V>, Duration, Duration, Partitioner, Function1<Tuple2<K, V>, Object>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying incremental reduceByKey over a sliding window.
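Example (a sketch of the incremental form; pairs is an assumed DStream[(String, Int)] and checkpointing is assumed to be enabled on the StreamingContext):

    import org.apache.spark.streaming.Seconds

    val windowedCounts = pairs.reduceByKeyAndWindow(
      (a: Int, b: Int) => a + b,   // add values entering the window
      (a: Int, b: Int) => a - b,   // subtract values leaving the window
      Seconds(30),                 // window duration
      Seconds(10))                 // slide duration
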
reduceByKeyLocally(Function2<V, V, V>) - Method in class org.apache.spark.api.java.JavaPairRDD
Merge the values for each key using an associative and commutative reduce function, but return the result immediately to the master as a Map.
reduceByKeyLocally(Function2<V, V, V>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Merge the values for each key using an associative and commutative reduce function, but return the results immediately to the master as a Map.
reduceByWindow(Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
reduceByWindow(Function2<T, T, T>, Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
reduceByWindow(Function2<T, T, T>, Duration, Duration) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD has a single element generated by reducing all elements in a sliding window over this DStream.
reduceByWindow(Function2<T, T, T>, Function2<T, T, T>, Duration, Duration) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD has a single element generated by reducing all elements in a sliding window over this DStream.
reduceByWindow(Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
reduceByWindow(Function2<T, T, T>, Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
reduceByWindow(Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
reduceByWindow(Function2<T, T, T>, Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
reduceByWindow(Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByWindow(Function2<T, T, T>, Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
reduceByWindow(Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByWindow(Function2<T, T, T>, Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
reduceByWindow(Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
reduceByWindow(Function2<T, T, T>, Function2<T, T, T>, Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
reduceByWindow(Function2<T, T, T>, Duration, Duration) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD has a single element generated by reducing all elements in a sliding window over this DStream.
reduceByWindow(Function2<T, T, T>, Function2<T, T, T>, Duration, Duration) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD has a single element generated by reducing all elements in a sliding window over this DStream.
ReduceFunction<T> - Interface in org.apache.spark.api.java.function
Base interface for function used in Dataset's reduce.
reduceGroups(Function2<V, V, V>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Reduces the elements of each group of data using the specified binary function.
reduceGroups(ReduceFunction<V>) - Method in class org.apache.spark.sql.KeyValueGroupedDataset
Reduces the elements of each group of data using the specified binary function.
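Example (a sketch; spark is an assumed SparkSession and words an assumed Dataset[String]):

    import spark.implicits._

    // Longest word per initial letter.
    val longestPerInitial = words
      .groupByKey(w => w.substring(0, 1))
      .reduceGroups((a, b) => if (a.length >= b.length) a else b)
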
reduceId() - Method in class org.apache.spark.FetchFailed
 
reduceId() - Method in class org.apache.spark.storage.ShuffleBlockId
 
reduceId() - Method in class org.apache.spark.storage.ShuffleDataBlockId
 
reduceId() - Method in class org.apache.spark.storage.ShuffleIndexBlockId
 
reduceLeft(Function2<B, A, B>) - Static method in class org.apache.spark.sql.types.StructType
 
reduceLeftOption(Function2<B, A, B>) - Static method in class org.apache.spark.sql.types.StructType
 
reduceOption(Function2<A1, A1, A1>) - Static method in class org.apache.spark.sql.types.StructType
 
reduceRight(Function2<A, B, B>) - Static method in class org.apache.spark.sql.types.StructType
 
reduceRightOption(Function2<A, B, B>) - Static method in class org.apache.spark.sql.types.StructType
 
references() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
regex(Regex) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
regexp_extract(Column, String, int) - Static method in class org.apache.spark.sql.functions
Extract a specific group, identified by index (idx) and matched by a Java regex, from the specified string column.
regexp_replace(Column, String, String) - Static method in class org.apache.spark.sql.functions
Replace all substrings of the specified string value that match regexp with rep.
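Example (a sketch; logs is an assumed DataFrame with a string column "line"):

    import org.apache.spark.sql.functions.{col, regexp_extract, regexp_replace}

    val areaCode = logs.select(regexp_extract(col("line"), "\\((\\d{3})\\)", 1))  // capture group 1
    val masked   = logs.select(regexp_replace(col("line"), "\\d", "#"))           // mask every digit
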
RegexTokenizer - Class in org.apache.spark.ml.feature
:: Experimental :: A regex based tokenizer that extracts tokens either by using the provided regex pattern to split the text (default) or repeatedly matching the regex (if gaps is false).
RegexTokenizer(String) - Constructor for class org.apache.spark.ml.feature.RegexTokenizer
 
RegexTokenizer() - Constructor for class org.apache.spark.ml.feature.RegexTokenizer
 
register(AccumulatorV2<?, ?>) - Method in class org.apache.spark.SparkContext
Register the given accumulator.
register(AccumulatorV2<?, ?>, String) - Method in class org.apache.spark.SparkContext
Register the given accumulator with given name.
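Example (a sketch; sc is an assumed SparkContext):

    import org.apache.spark.util.LongAccumulator

    val evenCount = new LongAccumulator
    sc.register(evenCount, "evenCount")   // named, so it appears in the web UI
    sc.parallelize(1 to 100).foreach(x => if (x % 2 == 0) evenCount.add(1))
    // evenCount.value == 50 on the driver
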
register(String, String) - Static method in class org.apache.spark.sql.types.UDTRegistration
Registers a UserDefinedType to a user class.
register(String, UserDefinedAggregateFunction) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined aggregate function (UDAF).
register(String, Function0<RT>, TypeTags.TypeTag<RT>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 0 arguments as user-defined function (UDF).
register(String, Function1<A1, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 1 argument as user-defined function (UDF).
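Example (a sketch; spark is an assumed SparkSession and "people" an assumed temporary view with a string column "name"):

    spark.udf.register("strLen", (s: String) => s.length)
    spark.sql("SELECT strLen(name) FROM people")
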
register(String, Function2<A1, A2, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 2 arguments as user-defined function (UDF).
register(String, Function3<A1, A2, A3, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 3 arguments as user-defined function (UDF).
register(String, Function4<A1, A2, A3, A4, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 4 arguments as user-defined function (UDF).
register(String, Function5<A1, A2, A3, A4, A5, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 5 arguments as user-defined function (UDF).
register(String, Function6<A1, A2, A3, A4, A5, A6, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 6 arguments as user-defined function (UDF).
register(String, Function7<A1, A2, A3, A4, A5, A6, A7, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 7 arguments as user-defined function (UDF).
register(String, Function8<A1, A2, A3, A4, A5, A6, A7, A8, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 8 arguments as user-defined function (UDF).
register(String, Function9<A1, A2, A3, A4, A5, A6, A7, A8, A9, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 9 arguments as user-defined function (UDF).
register(String, Function10<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 10 arguments as user-defined function (UDF).
register(String, Function11<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 11 arguments as user-defined function (UDF).
register(String, Function12<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 12 arguments as user-defined function (UDF).
register(String, Function13<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 13 arguments as user-defined function (UDF).
register(String, Function14<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>, TypeTags.TypeTag<A14>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 14 arguments as user-defined function (UDF).
register(String, Function15<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>, TypeTags.TypeTag<A14>, TypeTags.TypeTag<A15>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 15 arguments as user-defined function (UDF).
register(String, Function16<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>, TypeTags.TypeTag<A14>, TypeTags.TypeTag<A15>, TypeTags.TypeTag<A16>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 16 arguments as user-defined function (UDF).
register(String, Function17<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>, TypeTags.TypeTag<A14>, TypeTags.TypeTag<A15>, TypeTags.TypeTag<A16>, TypeTags.TypeTag<A17>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 17 arguments as user-defined function (UDF).
register(String, Function18<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>, TypeTags.TypeTag<A14>, TypeTags.TypeTag<A15>, TypeTags.TypeTag<A16>, TypeTags.TypeTag<A17>, TypeTags.TypeTag<A18>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 18 arguments as user-defined function (UDF).
register(String, Function19<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>, TypeTags.TypeTag<A14>, TypeTags.TypeTag<A15>, TypeTags.TypeTag<A16>, TypeTags.TypeTag<A17>, TypeTags.TypeTag<A18>, TypeTags.TypeTag<A19>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 19 arguments as user-defined function (UDF).
register(String, Function20<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>, TypeTags.TypeTag<A14>, TypeTags.TypeTag<A15>, TypeTags.TypeTag<A16>, TypeTags.TypeTag<A17>, TypeTags.TypeTag<A18>, TypeTags.TypeTag<A19>, TypeTags.TypeTag<A20>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 20 arguments as user-defined function (UDF).
register(String, Function21<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>, TypeTags.TypeTag<A14>, TypeTags.TypeTag<A15>, TypeTags.TypeTag<A16>, TypeTags.TypeTag<A17>, TypeTags.TypeTag<A18>, TypeTags.TypeTag<A19>, TypeTags.TypeTag<A20>, TypeTags.TypeTag<A21>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 21 arguments as user-defined function (UDF).
register(String, Function22<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>, TypeTags.TypeTag<A11>, TypeTags.TypeTag<A12>, TypeTags.TypeTag<A13>, TypeTags.TypeTag<A14>, TypeTags.TypeTag<A15>, TypeTags.TypeTag<A16>, TypeTags.TypeTag<A17>, TypeTags.TypeTag<A18>, TypeTags.TypeTag<A19>, TypeTags.TypeTag<A20>, TypeTags.TypeTag<A21>, TypeTags.TypeTag<A22>) - Method in class org.apache.spark.sql.UDFRegistration
Register a Scala closure of 22 arguments as user-defined function (UDF).
register(String, UDF1<?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 1 argument.
register(String, UDF2<?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 2 arguments.
register(String, UDF3<?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 3 arguments.
register(String, UDF4<?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 4 arguments.
register(String, UDF5<?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 5 arguments.
register(String, UDF6<?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 6 arguments.
register(String, UDF7<?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 7 arguments.
register(String, UDF8<?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 8 arguments.
register(String, UDF9<?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 9 arguments.
register(String, UDF10<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 10 arguments.
register(String, UDF11<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 11 arguments.
register(String, UDF12<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 12 arguments.
register(String, UDF13<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 13 arguments.
register(String, UDF14<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 14 arguments.
register(String, UDF15<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 15 arguments.
register(String, UDF16<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 16 arguments.
register(String, UDF17<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 17 arguments.
register(String, UDF18<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 18 arguments.
register(String, UDF19<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 19 arguments.
register(String, UDF20<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 20 arguments.
register(String, UDF21<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 21 arguments.
register(String, UDF22<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType) - Method in class org.apache.spark.sql.UDFRegistration
Register a user-defined function with 22 arguments.
register(QueryExecutionListener) - Method in class org.apache.spark.sql.util.ExecutionListenerManager
Registers the specified QueryExecutionListener.
register(AccumulatorV2<?, ?>) - Static method in class org.apache.spark.util.AccumulatorContext
Registers an AccumulatorV2 created on the driver such that it can be used on the executors.
register(String, Function0<Object>) - Static method in class org.apache.spark.util.SignalUtils
Adds an action to be run when a given signal is received by this process.
registerAvroSchemas(Seq<Schema>) - Method in class org.apache.spark.SparkConf
Use Kryo serialization and register the given set of Avro schemas so that the generic record serializer can decrease network IO
registerClasses(Kryo) - Method in interface org.apache.spark.serializer.KryoRegistrator
 
registerDialect(JdbcDialect) - Static method in class org.apache.spark.sql.jdbc.JdbcDialects
Register a dialect for use on all new matching JDBC org.apache.spark.sql.DataFrame instances.
registerKryoClasses(SparkConf) - Static method in class org.apache.spark.graphx.GraphXUtils
Registers classes that GraphX uses with Kryo.
registerKryoClasses(Class<?>[]) - Method in class org.apache.spark.SparkConf
Use Kryo serialization and register the given set of classes with Kryo.
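Example (a sketch; MyKey and MyRecord are hypothetical application classes):

    import org.apache.spark.SparkConf

    case class MyKey(id: Long)
    case class MyRecord(key: MyKey, payload: String)

    // Switches the serializer to Kryo and pre-registers the classes.
    val conf = new SparkConf()
      .setAppName("kryo-example")
      .registerKryoClasses(Array(classOf[MyKey], classOf[MyRecord]))
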
registerLogger(Logger) - Static method in class org.apache.spark.util.SignalUtils
Register a signal handler to log signals on UNIX-like systems.
registerPython(String, UserDefinedPythonFunction) - Method in class org.apache.spark.sql.UDFRegistration
 
registerShutdownDeleteDir(File) - Static method in class org.apache.spark.util.ShutdownHookManager
 
registerSqlSerDe(Tuple2<Function2<DataInputStream, Object, Object>, Function2<DataOutputStream, Object, Object>>) - Static method in class org.apache.spark.api.r.SerDe
 
registerStream(DStream<BinarySample>) - Method in class org.apache.spark.mllib.stat.test.StreamingTest
Register a DStream of values for significance testing.
registerStream(JavaDStream<BinarySample>) - Method in class org.apache.spark.mllib.stat.test.StreamingTest
Register a JavaDStream of values for significance testing.
registerTempTable(String) - Method in class org.apache.spark.sql.Dataset
Deprecated.
Use createOrReplaceTempView(viewName) instead. Since 2.0.0.
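Example (a sketch of the recommended replacement; df is an assumed DataFrame and spark an assumed SparkSession):

    df.createOrReplaceTempView("people")
    spark.sql("SELECT count(*) FROM people")
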
regParam() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
regParam() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
regParam() - Static method in class org.apache.spark.ml.recommendation.ALS
 
regParam() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
regParam() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
regParam() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
regParam() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
Regression() - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
RegressionEvaluator - Class in org.apache.spark.ml.evaluation
:: Experimental :: Evaluator for regression, which expects two input columns: prediction and label.
RegressionEvaluator(String) - Constructor for class org.apache.spark.ml.evaluation.RegressionEvaluator
 
RegressionEvaluator() - Constructor for class org.apache.spark.ml.evaluation.RegressionEvaluator
 
RegressionMetrics - Class in org.apache.spark.mllib.evaluation
Evaluator for regression.
RegressionMetrics(RDD<Tuple2<Object, Object>>, boolean) - Constructor for class org.apache.spark.mllib.evaluation.RegressionMetrics
 
RegressionMetrics(RDD<Tuple2<Object, Object>>) - Constructor for class org.apache.spark.mllib.evaluation.RegressionMetrics
 
RegressionModel<FeaturesType,M extends RegressionModel<FeaturesType,M>> - Class in org.apache.spark.ml.regression
:: DeveloperApi ::
RegressionModel() - Constructor for class org.apache.spark.ml.regression.RegressionModel
 
RegressionModel - Interface in org.apache.spark.mllib.regression
 
reindex() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
reindex() - Method in class org.apache.spark.graphx.VertexRDD
Construct a new VertexRDD that is indexed by only the visible vertices.
RelationalGroupedDataset - Class in org.apache.spark.sql
A set of methods for aggregations on a DataFrame, created by Dataset.groupBy.
RelationalGroupedDataset(Dataset<Row>, Seq<Expression>, RelationalGroupedDataset.GroupType) - Constructor for class org.apache.spark.sql.RelationalGroupedDataset
 
RelationalGroupedDataset.CubeType$ - Class in org.apache.spark.sql
To indicate it's the CUBE
RelationalGroupedDataset.CubeType$() - Constructor for class org.apache.spark.sql.RelationalGroupedDataset.CubeType$
 
RelationalGroupedDataset.GroupByType$ - Class in org.apache.spark.sql
To indicate it's the GroupBy
RelationalGroupedDataset.GroupByType$() - Constructor for class org.apache.spark.sql.RelationalGroupedDataset.GroupByType$
 
RelationalGroupedDataset.PivotType$ - Class in org.apache.spark.sql
 
RelationalGroupedDataset.PivotType$() - Constructor for class org.apache.spark.sql.RelationalGroupedDataset.PivotType$
 
RelationalGroupedDataset.RollupType$ - Class in org.apache.spark.sql
To indicate it's the ROLLUP
RelationalGroupedDataset.RollupType$() - Constructor for class org.apache.spark.sql.RelationalGroupedDataset.RollupType$
 
RelationProvider - Interface in org.apache.spark.sql.sources
::DeveloperApi:: Implemented by objects that produce relations for a specific kind of data source.
relativeDirection(long) - Method in class org.apache.spark.graphx.Edge
Return the relative direction of the edge to the corresponding vertex.
relativeError() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
relativeError() - Method in class org.apache.spark.util.sketch.CountMinSketch
Returns the relative error (or eps) of this CountMinSketch.
rem(Decimal, Decimal) - Method in class org.apache.spark.sql.types.Decimal.DecimalAsIfIntegral$
 
remainder(Decimal) - Method in class org.apache.spark.sql.types.Decimal
 
remember(Duration) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Sets each DStream in this context to remember the RDDs it generated in the last given duration.
remember(Duration) - Method in class org.apache.spark.streaming.StreamingContext
Set each DStream in this context to remember RDDs it generated in the last given duration.
REMOTE_BLOCKS_FETCHED() - Method in class org.apache.spark.InternalAccumulator.shuffleRead$
 
REMOTE_BYTES_READ() - Method in class org.apache.spark.InternalAccumulator.shuffleRead$
 
remoteBlocksFetched() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetricDistributions
 
remoteBlocksFetched() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetrics
 
remoteBytesRead() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetricDistributions
 
remoteBytesRead() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetrics
 
remove(String) - Static method in class org.apache.spark.api.r.JVMObjectTracker
 
remove(Param<T>) - Method in class org.apache.spark.ml.param.ParamMap
Removes a key from this map and returns its previously associated value, if any, as an Option.
remove(String) - Method in class org.apache.spark.SparkConf
Remove a parameter from the configuration
remove(String) - Method in class org.apache.spark.sql.types.MetadataBuilder
 
remove() - Method in class org.apache.spark.streaming.State
Remove the state if it exists.
remove(long) - Static method in class org.apache.spark.util.AccumulatorContext
Unregisters the AccumulatorV2 with the given ID, if any.
removeFromDriver() - Method in class org.apache.spark.storage.BlockManagerMessages.RemoveBroadcast
 
removeListener(ContinuousQueryListener) - Method in class org.apache.spark.sql.ContinuousQueryManager
Deregister a ContinuousQueryListener.
removeMetadata(String, DataType) - Static method in class org.apache.spark.sql.types.StructType
 
removeSelfEdges() - Method in class org.apache.spark.graphx.GraphOps
Remove self edges.
removeShutdownDeleteDir(File) - Static method in class org.apache.spark.util.ShutdownHookManager
 
removeShutdownHook(Object) - Static method in class org.apache.spark.util.ShutdownHookManager
Remove a previously installed shutdown hook.
rep(Function0<Parsers.Parser<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
rep1(Function0<Parsers.Parser<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
rep1(Function0<Parsers.Parser<T>>, Function0<Parsers.Parser<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
rep1sep(Function0<Parsers.Parser<T>>, Function0<Parsers.Parser<Object>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
repartition(int) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return a new RDD that has exactly numPartitions partitions.
repartition(int) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a new RDD that has exactly numPartitions partitions.
repartition(int) - Method in class org.apache.spark.api.java.JavaRDD
Return a new RDD that has exactly numPartitions partitions.
repartition(int, Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
repartition(int, Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
repartition(int, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
repartition(int, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
repartition(int, Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
repartition(int, Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
repartition(int, Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
repartition(int, Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
repartition(int, Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
repartition(int, Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Return a new RDD that has exactly numPartitions partitions.
repartition(int, Column...) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset partitioned by the given partitioning expressions into numPartitions.
repartition(Column...) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset partitioned by the given partitioning expressions, using spark.sql.shuffle.partitions as the number of partitions.
repartition(int) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset that has exactly numPartitions partitions.
repartition(int, Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset partitioned by the given partitioning expressions into numPartitions.
repartition(Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset partitioned by the given partitioning expressions, using spark.sql.shuffle.partitions as the number of partitions.
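Example (a sketch; df is an assumed DataFrame with a "department" column):

    import org.apache.spark.sql.functions.col

    val byDept   = df.repartition(col("department"))        // spark.sql.shuffle.partitions partitions
    val fixed200 = df.repartition(200, col("department"))   // exactly 200 partitions
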
repartition(int) - Method in class org.apache.spark.streaming.api.java.JavaDStream
Return a new DStream with an increased or decreased level of parallelism.
repartition(int) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
repartition(int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream with an increased or decreased level of parallelism.
repartition(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
repartition(int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
repartition(int) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
repartition(int) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream with an increased or decreased level of parallelism.
repartition$default$2(int) - Static method in class org.apache.spark.api.r.RRDD
 
repartition$default$2(int) - Static method in class org.apache.spark.graphx.EdgeRDD
 
repartition$default$2(int) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
repartition$default$2(int) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
repartition$default$2(int) - Static method in class org.apache.spark.graphx.VertexRDD
 
repartition$default$2(int) - Static method in class org.apache.spark.rdd.HadoopRDD
 
repartition$default$2(int) - Static method in class org.apache.spark.rdd.JdbcRDD
 
repartition$default$2(int) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
repartition$default$2(int) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
repartitionAndSortWithinPartitions(Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Repartition the RDD according to the given partitioner and, within each resulting partition, sort records by their keys.
repartitionAndSortWithinPartitions(Partitioner, Comparator<K>) - Method in class org.apache.spark.api.java.JavaPairRDD
Repartition the RDD according to the given partitioner and, within each resulting partition, sort records by their keys.
repartitionAndSortWithinPartitions(Partitioner) - Method in class org.apache.spark.rdd.OrderedRDDFunctions
Repartition the RDD according to the given partitioner and, within each resulting partition, sort records by their keys.
repeat(Column, int) - Static method in class org.apache.spark.sql.functions
Repeats a string column n times, and returns it as a new string column.
replace(String, Map<T, T>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Replaces values matching keys in replacement map with the corresponding values.
replace(String[], Map<T, T>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
Replaces values matching keys in replacement map with the corresponding values.
replace(String, Map<T, T>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
(Scala-specific) Replaces values matching keys in replacement map.
replace(Seq<String>, Map<T, T>) - Method in class org.apache.spark.sql.DataFrameNaFunctions
(Scala-specific) Replaces values matching keys in replacement map.
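Example (a sketch; df is an assumed DataFrame with a numeric "height" column where -1.0 marks missing values):

    val cleaned = df.na.replace("height", Map(-1.0 -> Double.NaN))
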
replicatedVertexView() - Method in class org.apache.spark.graphx.impl.GraphImpl
 
replication() - Method in class org.apache.spark.storage.StorageLevel
 
repN(int, Function0<Parsers.Parser<T>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
reportError(String, Throwable) - Method in class org.apache.spark.streaming.receiver.Receiver
Report exceptions in receiving data.
repr() - Static method in class org.apache.spark.sql.types.StructType
 
repsep(Function0<Parsers.Parser<T>>, Function0<Parsers.Parser<Object>>) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
requestedTotal() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.RequestExecutors
 
requestExecutors(int) - Method in class org.apache.spark.SparkContext
:: DeveloperApi :: Request an additional number of executors from the cluster manager.
requiredChildDistribution() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
requiredChildOrdering() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
res() - Method in class org.apache.spark.mllib.optimization.NNLS.Workspace
 
reservoirSampleAndCount(Iterator<T>, int, long, ClassTag<T>) - Static method in class org.apache.spark.util.random.SamplingUtils
Reservoir sampling implementation that also returns the input size.
reset() - Method in class org.apache.spark.io.LZ4BlockInputStream
 
reset() - Method in class org.apache.spark.storage.BufferReleasingInputStream
 
reset() - Method in class org.apache.spark.util.AccumulatorV2
Resets this accumulator to its zero value, so that isZero returns true.
reset() - Method in class org.apache.spark.util.DoubleAccumulator
 
reset() - Method in class org.apache.spark.util.LegacyAccumulatorWrapper
 
reset() - Method in class org.apache.spark.util.ListAccumulator
 
reset() - Method in class org.apache.spark.util.LongAccumulator
 
resetTerminated() - Method in class org.apache.spark.sql.ContinuousQueryManager
Forget about past terminated queries so that awaitAnyTermination() can be used again to wait for new terminations.
residualDegreeOfFreedom() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
The residual degrees of freedom.
residualDegreeOfFreedomNull() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
The residual degrees of freedom for the null model.
residuals() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
Get the default residuals (deviance residuals) of the fitted model.
residuals(String) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionSummary
Get the residuals of the fitted model by type.
residuals() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Residuals (label - predicted value)
resolve(String) - Method in class org.apache.spark.sql.Dataset
 
resolveURI(String) - Static method in class org.apache.spark.util.Utils
Return a well-formed URI for the file described by a user input string.
resolveURIs(String) - Static method in class org.apache.spark.util.Utils
Resolve a comma-separated list of paths.
responder() - Method in class org.apache.spark.ui.JettyUtils.ServletParams
 
responseFromBackup(String) - Static method in class org.apache.spark.util.Utils
Return true if the response message is sent from a backup Master on standby.
restart(String) - Method in class org.apache.spark.streaming.receiver.Receiver
Restart the receiver.
restart(String, Throwable) - Method in class org.apache.spark.streaming.receiver.Receiver
Restart the receiver.
restart(String, Throwable, int) - Method in class org.apache.spark.streaming.receiver.Receiver
Restart the receiver.
ResubmitFailedStages - Class in org.apache.spark.scheduler
 
ResubmitFailedStages() - Constructor for class org.apache.spark.scheduler.ResubmitFailedStages
 
Resubmitted - Class in org.apache.spark
:: DeveloperApi :: A ShuffleMapTask that completed successfully earlier, but we lost the executor before the stage completed.
Resubmitted() - Constructor for class org.apache.spark.Resubmitted
 
result(Duration, CanAwait) - Method in class org.apache.spark.ComplexFutureAction
 
result(Duration, CanAwait) - Method in interface org.apache.spark.FutureAction
Awaits and returns the result (of type T) of this action.
result(Duration, CanAwait) - Method in class org.apache.spark.SimpleFutureAction
 
RESULT_SERIALIZATION_TIME() - Static method in class org.apache.spark.InternalAccumulator
 
RESULT_SERIALIZATION_TIME() - Static method in class org.apache.spark.ui.jobs.TaskDetailsClassNames
 
RESULT_SERIALIZATION_TIME() - Static method in class org.apache.spark.ui.ToolTips
 
RESULT_SIZE() - Static method in class org.apache.spark.InternalAccumulator
 
resultSerializationTime() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
resultSerializationTime() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
resultSetToObjectArray(ResultSet) - Static method in class org.apache.spark.rdd.JdbcRDD
 
resultSize() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
resultSize() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
retainedJobs() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
retainedStages() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
retryWaitMs(SparkConf) - Static method in class org.apache.spark.util.RpcUtils
Returns the configured number of milliseconds to wait on each retry
ReturnStatementFinder - Class in org.apache.spark.util
 
ReturnStatementFinder() - Constructor for class org.apache.spark.util.ReturnStatementFinder
 
reverse() - Method in class org.apache.spark.graphx.EdgeDirection
Reverse the direction of an edge.
reverse() - Method in class org.apache.spark.graphx.EdgeRDD
Reverse all the edges in this RDD.
reverse() - Method in class org.apache.spark.graphx.Graph
Reverses all edges in the graph.
reverse() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
reverse() - Method in class org.apache.spark.graphx.impl.GraphImpl
 
reverse(Column) - Static method in class org.apache.spark.sql.functions
Reverses the string column and returns it as a new string column.
reverse() - Static method in class org.apache.spark.sql.types.StructType
 
reversed() - Static method in class org.apache.spark.sql.types.StructType
 
reverseIterator() - Static method in class org.apache.spark.sql.types.StructType
 
reverseMap(Function1<A, B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
reverseRoutingTables() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
reverseRoutingTables() - Method in class org.apache.spark.graphx.VertexRDD
Returns a new VertexRDD reflecting a reversal of all edge directions in the corresponding EdgeRDD.
ReviveOffers - Class in org.apache.spark.scheduler.local
 
ReviveOffers() - Constructor for class org.apache.spark.scheduler.local.ReviveOffers
 
RFormula - Class in org.apache.spark.ml.feature
:: Experimental :: Implements the transforms required for fitting a dataset against an R model formula.
RFormula(String) - Constructor for class org.apache.spark.ml.feature.RFormula
 
RFormula() - Constructor for class org.apache.spark.ml.feature.RFormula
 
RFormulaModel - Class in org.apache.spark.ml.feature
:: Experimental :: A fitted RFormula.
RFormulaParser - Class in org.apache.spark.ml.feature
Limited implementation of R formula parsing.
RFormulaParser() - Constructor for class org.apache.spark.ml.feature.RFormulaParser
 
RidgeRegressionModel - Class in org.apache.spark.mllib.regression
Regression model trained using RidgeRegression.
RidgeRegressionModel(Vector, double) - Constructor for class org.apache.spark.mllib.regression.RidgeRegressionModel
 
RidgeRegressionWithSGD - Class in org.apache.spark.mllib.regression
Deprecated.
Use ml.regression.LinearRegression with elasticNetParam = 0.0. Note the default regParam is 0.01 for RidgeRegressionWithSGD, but is 0.0 for LinearRegression. Since 2.0.0.
RidgeRegressionWithSGD() - Constructor for class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
Construct a RidgeRegression object with default parameters: {stepSize: 1.0, numIterations: 100, regParam: 0.01, miniBatchFraction: 1.0}.
right() - Method in class org.apache.spark.sql.sources.And
 
right() - Method in class org.apache.spark.sql.sources.Or
 
rightCategories() - Method in class org.apache.spark.ml.tree.CategoricalSplit
Get sorted categories which split to the right
rightChild() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData
 
rightChild() - Method in class org.apache.spark.ml.tree.InternalNode
 
rightChildIndex(int) - Static method in class org.apache.spark.mllib.tree.model.Node
Return the index of the right child of this node.
rightImpurity() - Method in class org.apache.spark.mllib.tree.model.InformationGainStats
 
rightNode() - Method in class org.apache.spark.mllib.tree.model.Node
 
rightNodeId() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
rightOuterJoin(JavaPairRDD<K, W>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Perform a right outer join of this and other.
rightOuterJoin(JavaPairRDD<K, W>) - Method in class org.apache.spark.api.java.JavaPairRDD
Perform a right outer join of this and other.
rightOuterJoin(JavaPairRDD<K, W>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Perform a right outer join of this and other.
rightOuterJoin(RDD<Tuple2<K, W>>, Partitioner) - Method in class org.apache.spark.rdd.PairRDDFunctions
Perform a right outer join of this and other.
rightOuterJoin(RDD<Tuple2<K, W>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Perform a right outer join of this and other.
rightOuterJoin(RDD<Tuple2<K, W>>, int) - Method in class org.apache.spark.rdd.PairRDDFunctions
Perform a right outer join of this and other.
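A minimal sketch of rightOuterJoin on pair RDDs, assuming an existing SparkContext named sc; the names sales and prices are made up for illustration.

    val sales  = sc.parallelize(Seq(("a", 3), ("b", 5)))
    val prices = sc.parallelize(Seq(("a", 1.0), ("c", 2.0)))
    // Every key in prices appears in the result; the value from the left side is wrapped in Option.
    val joined = sales.rightOuterJoin(prices)   // RDD[(String, (Option[Int], Double))]
    joined.collect().foreach(println)           // (a,(Some(3),1.0)), (c,(None,2.0)) in some order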
rightOuterJoin(JavaPairDStream<K, W>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'right outer join' between RDDs of this DStream and other DStream.
rightOuterJoin(JavaPairDStream<K, W>, int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'right outer join' between RDDs of this DStream and other DStream.
rightOuterJoin(JavaPairDStream<K, W>, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by applying 'right outer join' between RDDs of this DStream and other DStream.
rightOuterJoin(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
rightOuterJoin(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
rightOuterJoin(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
rightOuterJoin(JavaPairDStream<K, W>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
rightOuterJoin(JavaPairDStream<K, W>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
rightOuterJoin(JavaPairDStream<K, W>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
rightOuterJoin(DStream<Tuple2<K, W>>, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'right outer join' between RDDs of this DStream and other DStream.
rightOuterJoin(DStream<Tuple2<K, W>>, int, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'right outer join' between RDDs of this DStream and other DStream.
rightOuterJoin(DStream<Tuple2<K, W>>, Partitioner, ClassTag<W>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new DStream by applying 'right outer join' between RDDs of this DStream and other DStream.
rightPredict() - Method in class org.apache.spark.mllib.tree.model.InformationGainStats
 
rint(Column) - Static method in class org.apache.spark.sql.functions
Returns the double value that is closest in value to the argument and is equal to a mathematical integer.
rint(String) - Static method in class org.apache.spark.sql.functions
Returns the double value that is closest in value to the argument and is equal to a mathematical integer.
rlike(String) - Method in class org.apache.spark.sql.Column
SQL RLIKE expression (LIKE with Regex).
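A short example of Column.rlike, assuming df is an existing DataFrame with a string column named name.

    import org.apache.spark.sql.functions.col
    // Keep only the rows whose name matches the regular expression.
    val matched = df.filter(col("name").rlike("^Sp.rk"))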
RMATa() - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
RMATb() - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
RMATc() - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
RMATd() - Static method in class org.apache.spark.graphx.util.GraphGenerators
 
rmatGraph(SparkContext, int, int) - Static method in class org.apache.spark.graphx.util.GraphGenerators
A random graph generator using the R-MAT model, proposed in "R-MAT: A Recursive Model for Graph Mining" by Chakrabarti et al.
rnd() - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
roc() - Method in class org.apache.spark.ml.classification.BinaryLogisticRegressionSummary
Returns the receiver operating characteristic (ROC) curve, which is a DataFrame having two fields (FPR, TPR) with (0.0, 0.0) prepended and (1.0, 1.0) appended to it.
roc() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Returns the receiver operating characteristic (ROC) curve, which is an RDD of (false positive rate, true positive rate) with (0.0, 0.0) prepended and (1.0, 1.0) appended to it.
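A small sketch of reading the ROC curve from BinaryClassificationMetrics; scoreAndLabels is an assumed RDD[(Double, Double)] of (score, label) pairs.

    import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
    val metrics  = new BinaryClassificationMetrics(scoreAndLabels)
    val rocCurve = metrics.roc()                     // RDD of (false positive rate, true positive rate)
    println(s"area under ROC = ${metrics.areaUnderROC()}")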
rollup(Column...) - Method in class org.apache.spark.sql.Dataset
Create a multi-dimensional rollup for the current Dataset using the specified columns, so we can run aggregation on them.
rollup(String, String...) - Method in class org.apache.spark.sql.Dataset
Create a multi-dimensional rollup for the current Dataset using the specified columns, so we can run aggregation on them.
rollup(Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Create a multi-dimensional rollup for the current Dataset using the specified columns, so we can run aggregation on them.
rollup(String, Seq<String>) - Method in class org.apache.spark.sql.Dataset
Create a multi-dimensional rollup for the current Dataset using the specified columns, so we can run aggregation on them.
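For illustration, a rollup producing subtotals per (country, city), per country, and a grand total; df, country, city, and amount are assumed names.

    df.rollup("country", "city")
      .sum("amount")
      .show()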
rootMeanSquaredError() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
Returns the root mean squared error, which is defined as the square root of the mean squared error.
rootMeanSquaredError() - Method in class org.apache.spark.mllib.evaluation.RegressionMetrics
Returns the root mean squared error, which is defined as the square root of the mean squared error.
rootNode() - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
rootNode() - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
round(Column) - Static method in class org.apache.spark.sql.functions
Returns the value of the column e rounded to 0 decimal places.
round(Column, int) - Static method in class org.apache.spark.sql.functions
Round the value of e to scale decimal places when scale >= 0, or round the integral part when scale < 0.
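A brief sketch of round with an explicit scale; df and price are assumed names.

    import org.apache.spark.sql.functions.{col, round}
    // Two decimal places, and rounding of the integral part (nearest ten) with a negative scale.
    df.select(round(col("price"), 2), round(col("price"), -1))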
ROUND_CEILING() - Static method in class org.apache.spark.sql.types.Decimal
 
ROUND_FLOOR() - Static method in class org.apache.spark.sql.types.Decimal
 
ROUND_HALF_EVEN() - Static method in class org.apache.spark.sql.types.Decimal
 
ROUND_HALF_UP() - Static method in class org.apache.spark.sql.types.Decimal
 
ROW() - Static method in class org.apache.spark.api.r.SerializationFormats
 
Row - Interface in org.apache.spark.sql
Represents one row of output from a relational operator.
row_number() - Static method in class org.apache.spark.sql.functions
Window function: returns a sequential number starting at 1 within a window partition.
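A minimal window-function sketch using row_number; df, dept, and salary are assumed names.

    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions.{col, row_number}
    // Number the rows of each department from highest to lowest salary.
    val byDept = Window.partitionBy("dept").orderBy(col("salary").desc)
    df.withColumn("rank", row_number().over(byDept))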
RowFactory - Class in org.apache.spark.sql
A factory class used to construct Row objects.
RowFactory() - Constructor for class org.apache.spark.sql.RowFactory
 
rowIndices() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
rowIndices() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
rowIter() - Static method in class org.apache.spark.ml.linalg.DenseMatrix
 
rowIter() - Method in interface org.apache.spark.ml.linalg.Matrix
Returns an iterator of row vectors.
rowIter() - Static method in class org.apache.spark.ml.linalg.SparseMatrix
 
rowIter() - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
 
rowIter() - Method in interface org.apache.spark.mllib.linalg.Matrix
Returns an iterator of row vectors.
rowIter() - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
 
RowMatrix - Class in org.apache.spark.mllib.linalg.distributed
Represents a row-oriented distributed Matrix with no meaningful row indices.
RowMatrix(RDD<Vector>, long, int) - Constructor for class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
RowMatrix(RDD<Vector>) - Constructor for class org.apache.spark.mllib.linalg.distributed.RowMatrix
Alternative constructor leaving matrix dimensions to be determined automatically.
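A small sketch of building a RowMatrix with inferred dimensions, assuming an existing SparkContext sc.

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.linalg.distributed.RowMatrix
    val rows = sc.parallelize(Seq(Vectors.dense(1.0, 2.0), Vectors.dense(3.0, 4.0)))
    val mat  = new RowMatrix(rows)                   // numRows and numCols computed from the data
    println(s"${mat.numRows()} x ${mat.numCols()}")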
rows() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
 
rows() - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
 
rowsBetween(long, long) - Method in class org.apache.spark.sql.expressions.WindowSpec
Defines the frame boundaries, from start (inclusive) to end (inclusive).
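A hedged sketch of a row-based frame: a running sum over the current row and the two preceding rows; df, id, ts, and value are assumed names.

    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions.{col, sum}
    val spec = Window.partitionBy("id").orderBy("ts").rowsBetween(-2, 0)
    df.withColumn("runningSum", sum(col("value")).over(spec))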
rowsPerBlock() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
 
rPackages() - Static method in class org.apache.spark.api.r.RUtils
 
rpad(Column, int, String) - Static method in class org.apache.spark.sql.functions
Right-pad the string column with pad to a length of len.
RpcUtils - Class in org.apache.spark.util
 
RpcUtils() - Constructor for class org.apache.spark.util.RpcUtils
 
RRDD<T> - Class in org.apache.spark.api.r
An RDD that stores serialized R objects as Array[Byte].
RRDD(RDD<T>, byte[], String, String, byte[], Object[], ClassTag<T>) - Constructor for class org.apache.spark.api.r.RRDD
 
rtrim(Column) - Static method in class org.apache.spark.sql.functions
Trim the spaces from the right end of the specified string value.
run(Graph<VD, ED>, int, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.ConnectedComponents
Compute the connected component membership of each vertex and return a graph with the vertex value containing the lowest vertex id in the connected component containing that vertex.
run(Graph<VD, ED>, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.ConnectedComponents
Compute the connected component membership of each vertex and return a graph with the vertex value containing the lowest vertex id in the connected component containing that vertex.
run(Graph<VD, ED>, int, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.LabelPropagation
Run static Label Propagation for detecting communities in networks.
run(Graph<VD, ED>, int, double, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.PageRank
Run PageRank for a fixed number of iterations returning a graph with vertex attributes containing the PageRank and edge attributes the normalized edge weight.
run(Graph<VD, ED>, Seq<Object>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.ShortestPaths
Computes shortest paths to the given set of landmark vertices.
run(Graph<VD, ED>, int, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.StronglyConnectedComponents
Compute the strongly connected component (SCC) of each vertex and return a graph with the vertex value containing the lowest vertex id in the SCC containing that vertex.
run(RDD<Edge<Object>>, SVDPlusPlus.Conf) - Static method in class org.apache.spark.graphx.lib.SVDPlusPlus
Implement SVD++ based on "Factorization Meets the Neighborhood: a Multifaceted Collaborative Filtering Model", available at http://public.research.att.com/~volinsky/netflix/kdd08koren.pdf.
run(Graph<VD, ED>, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.TriangleCount
 
run(RDD<LabeledPoint>, BoostingStrategy, long) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
Method to train a gradient boosting model
run(RDD<LabeledPoint>, Strategy, int, String, long, Option<<any>>, Option<String>) - Static method in class org.apache.spark.ml.tree.impl.RandomForest
Train a random forest.
run(RDD<LabeledPoint>) - Method in class org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
Run Logistic Regression with the configured parameters on an input RDD of LabeledPoint entries.
run(RDD<LabeledPoint>, Vector) - Method in class org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
Run Logistic Regression with the configured parameters on an input RDD of LabeledPoint entries starting from the initial weights provided.
run(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
run(RDD<LabeledPoint>, Vector) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
run(RDD<LabeledPoint>) - Method in class org.apache.spark.mllib.classification.NaiveBayes
Run the algorithm with the configured parameters on an input RDD of LabeledPoint entries.
run(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
run(RDD<LabeledPoint>, Vector) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
run(RDD<Vector>) - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Runs the bisecting k-means algorithm.
run(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Java-friendly version of run().
run(RDD<Vector>) - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Perform expectation maximization
run(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Java-friendly version of run()
run(RDD<Vector>) - Method in class org.apache.spark.mllib.clustering.KMeans
Train a K-means model on the given set of points; data should be cached for high performance, because this is an iterative algorithm.
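A minimal sketch of KMeans.run, assuming data is an existing RDD[Vector] of feature vectors.

    import org.apache.spark.mllib.clustering.KMeans
    data.cache()                                     // iterative algorithm, so cache the input
    val model = new KMeans().setK(3).setMaxIterations(20).run(data)
    model.clusterCenters.foreach(println)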
run(RDD<Tuple2<Object, Vector>>) - Method in class org.apache.spark.mllib.clustering.LDA
Learn an LDA model using the given dataset.
run(JavaPairRDD<Long, Vector>) - Method in class org.apache.spark.mllib.clustering.LDA
Java-friendly version of run()
run(Graph<Object, Object>) - Method in class org.apache.spark.mllib.clustering.PowerIterationClustering
Run the PIC algorithm on Graph.
run(RDD<Tuple3<Object, Object, Object>>) - Method in class org.apache.spark.mllib.clustering.PowerIterationClustering
Run the PIC algorithm.
run(JavaRDD<Tuple3<Long, Long, Double>>) - Method in class org.apache.spark.mllib.clustering.PowerIterationClustering
A Java-friendly version of PowerIterationClustering.run.
run(RDD<FPGrowth.FreqItemset<Item>>, ClassTag<Item>) - Method in class org.apache.spark.mllib.fpm.AssociationRules
Computes the association rules with confidence above minConfidence.
run(JavaRDD<FPGrowth.FreqItemset<Item>>) - Method in class org.apache.spark.mllib.fpm.AssociationRules
Java-friendly version of run.
run(RDD<Object>, ClassTag<Item>) - Method in class org.apache.spark.mllib.fpm.FPGrowth
Computes an FP-Growth model that contains frequent itemsets.
run(JavaRDD<Basket>) - Method in class org.apache.spark.mllib.fpm.FPGrowth
Java-friendly version of run.
run(RDD<Object[]>, ClassTag<Item>) - Method in class org.apache.spark.mllib.fpm.PrefixSpan
Finds the complete set of frequent sequential patterns in the input sequences of itemsets.
run(JavaRDD<Sequence>) - Method in class org.apache.spark.mllib.fpm.PrefixSpan
A Java-friendly version of run() that reads sequences from a JavaRDD and returns frequent sequences in a PrefixSpanModel.
run(RDD<Rating>) - Method in class org.apache.spark.mllib.recommendation.ALS
Run ALS with the configured parameters on an input RDD of Rating objects.
run(JavaRDD<Rating>) - Method in class org.apache.spark.mllib.recommendation.ALS
Java-friendly version of ALS.run.
run(RDD<LabeledPoint>) - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
Run the algorithm with the configured parameters on an input RDD of LabeledPoint entries.
run(RDD<LabeledPoint>, Vector) - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
Run the algorithm with the configured parameters on an input RDD of LabeledPoint entries starting from the initial weights provided.
run(RDD<Tuple3<Object, Object, Object>>) - Method in class org.apache.spark.mllib.regression.IsotonicRegression
Run the IsotonicRegression algorithm to obtain an isotonic regression model.
run(JavaRDD<Tuple3<Double, Double, Double>>) - Method in class org.apache.spark.mllib.regression.IsotonicRegression
Run the pool adjacent violators algorithm to obtain an isotonic regression model.
run(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
run(RDD<LabeledPoint>, Vector) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
run(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
run(RDD<LabeledPoint>, Vector) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
run(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
run(RDD<LabeledPoint>, Vector) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
run(RDD<LabeledPoint>) - Method in class org.apache.spark.mllib.tree.DecisionTree
Method to train a decision tree model over an RDD
run(RDD<LabeledPoint>) - Method in class org.apache.spark.mllib.tree.GradientBoostedTrees
Method to train a gradient boosting model
run(JavaRDD<LabeledPoint>) - Method in class org.apache.spark.mllib.tree.GradientBoostedTrees
Java-friendly API for org.apache.spark.mllib.tree.GradientBoostedTrees.run.
run(RDD<LabeledPoint>) - Method in class org.apache.spark.mllib.tree.RandomForest
Method to train a decision tree model over an RDD
run() - Method in class org.apache.spark.sql.hive.execution.ScriptTransformationWriterThread
 
run() - Method in class org.apache.spark.util.SparkShutdownHook
 
runApproximateJob(RDD<T>, Function2<TaskContext, Iterator<T>, U>, <any>, long) - Method in class org.apache.spark.SparkContext
:: DeveloperApi :: Run a job that can return approximate results.
runInNewThread(String, boolean, Function0<T>) - Static method in class org.apache.spark.util.ThreadUtils
Run a piece of code in a new thread and return the result.
runJob(RDD<T>, Function2<TaskContext, Iterator<T>, U>, Seq<Object>, Function2<Object, U, BoxedUnit>, ClassTag<U>) - Method in class org.apache.spark.SparkContext
Run a function on a given set of partitions in an RDD and pass the results to the given handler function.
runJob(RDD<T>, Function2<TaskContext, Iterator<T>, U>, Seq<Object>, ClassTag<U>) - Method in class org.apache.spark.SparkContext
Run a function on a given set of partitions in an RDD and return the results as an array.
runJob(RDD<T>, Function1<Iterator<T>, U>, Seq<Object>, ClassTag<U>) - Method in class org.apache.spark.SparkContext
Run a job on a given set of partitions of an RDD, but take a function of type Iterator[T] => U instead of (TaskContext, Iterator[T]) => U.
runJob(RDD<T>, Function2<TaskContext, Iterator<T>, U>, ClassTag<U>) - Method in class org.apache.spark.SparkContext
Run a job on all partitions in an RDD and return the results in an array.
runJob(RDD<T>, Function1<Iterator<T>, U>, ClassTag<U>) - Method in class org.apache.spark.SparkContext
Run a job on all partitions in an RDD and return the results in an array.
runJob(RDD<T>, Function2<TaskContext, Iterator<T>, U>, Function2<Object, U, BoxedUnit>, ClassTag<U>) - Method in class org.apache.spark.SparkContext
Run a job on all partitions in an RDD and pass the results to a handler function.
runJob(RDD<T>, Function1<Iterator<T>, U>, Function2<Object, U, BoxedUnit>, ClassTag<U>) - Method in class org.apache.spark.SparkContext
Run a job on all partitions in an RDD and pass the results to a handler function.
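A sketch of running a job on a subset of partitions only; sc and rdd (an assumed RDD[Int]) must already exist.

    // Count the elements of the first two partitions without touching the rest of the RDD.
    val counts: Array[Int] = sc.runJob(rdd, (iter: Iterator[Int]) => iter.size, Seq(0, 1))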
runLBFGS(RDD<Tuple2<Object, Vector>>, Gradient, Updater, int, double, int, double, Vector) - Static method in class org.apache.spark.mllib.optimization.LBFGS
Run Limited-memory BFGS (L-BFGS) in parallel.
runMiniBatchSGD(RDD<Tuple2<Object, Vector>>, Gradient, Updater, double, int, double, double, Vector, double) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
Run stochastic gradient descent (SGD) in parallel using mini batches.
runMiniBatchSGD(RDD<Tuple2<Object, Vector>>, Gradient, Updater, double, int, double, double, Vector) - Static method in class org.apache.spark.mllib.optimization.GradientDescent
Alias of runMiniBatchSGD with convergenceTol set to default value of 0.001.
running() - Method in class org.apache.spark.scheduler.TaskInfo
 
RUNNING() - Static method in class org.apache.spark.TaskState
 
runPreCanonicalized(Graph<VD, ED>, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.TriangleCount
 
runtimeConf() - Method in class org.apache.spark.sql.SQLContext
 
RuntimeConfig - Class in org.apache.spark.sql
Runtime configuration interface for Spark.
RuntimePercentage - Class in org.apache.spark.scheduler
 
RuntimePercentage(double, Option<Object>, double) - Constructor for class org.apache.spark.scheduler.RuntimePercentage
 
runUntilConvergence(Graph<VD, ED>, double, double, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.PageRank
Run a dynamic version of PageRank returning a graph with vertex attributes containing the PageRank and edge attributes containing the normalized edge weight.
runUntilConvergenceWithOptions(Graph<VD, ED>, double, double, Option<Object>, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.PageRank
Run a dynamic version of PageRank returning a graph with vertex attributes containing the PageRank and edge attributes containing the normalized edge weight.
runWith(Function1<B, U>) - Static method in class org.apache.spark.sql.types.StructType
 
runWithOptions(Graph<VD, ED>, int, double, Option<Object>, ClassTag<VD>, ClassTag<ED>) - Static method in class org.apache.spark.graphx.lib.PageRank
Run PageRank for a fixed number of iterations returning a graph with vertex attributes containing the PageRank and edge attributes the normalized edge weight.
runWithValidation(RDD<LabeledPoint>, RDD<LabeledPoint>, BoostingStrategy, long) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
Method to validate a gradient boosting model
runWithValidation(RDD<LabeledPoint>, RDD<LabeledPoint>) - Method in class org.apache.spark.mllib.tree.GradientBoostedTrees
Method to validate a gradient boosting model
runWithValidation(JavaRDD<LabeledPoint>, JavaRDD<LabeledPoint>) - Method in class org.apache.spark.mllib.tree.GradientBoostedTrees
Java-friendly API for org.apache.spark.mllib.tree.GradientBoostedTrees.runWithValidation.
RUtils - Class in org.apache.spark.api.r
 
RUtils() - Constructor for class org.apache.spark.api.r.RUtils
 
RWrappers - Class in org.apache.spark.ml.r
This is the Scala stub of SparkR read.ml.
RWrappers() - Constructor for class org.apache.spark.ml.r.RWrappers
 

S

s() - Method in class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
sameElements(GenIterable<B>) - Static method in class org.apache.spark.sql.types.StructType
 
sameResult(PlanType) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
sameThread() - Static method in class org.apache.spark.util.ThreadUtils
An ExecutionContextExecutor that runs each task in the thread that invokes execute/submit.
sample(boolean, Double) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return a sampled subset of this RDD.
sample(boolean, Double, long) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return a sampled subset of this RDD.
sample(boolean, double) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a sampled subset of this RDD.
sample(boolean, double, long) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a sampled subset of this RDD.
sample(boolean, double) - Method in class org.apache.spark.api.java.JavaRDD
Return a sampled subset of this RDD.
sample(boolean, double, long) - Method in class org.apache.spark.api.java.JavaRDD
Return a sampled subset of this RDD.
sample(boolean, double, long) - Static method in class org.apache.spark.api.r.RRDD
 
sample(boolean, double, long) - Static method in class org.apache.spark.graphx.EdgeRDD
 
sample(boolean, double, long) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
sample(boolean, double, long) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
sample(boolean, double, long) - Static method in class org.apache.spark.graphx.VertexRDD
 
sample(boolean, double, long) - Static method in class org.apache.spark.rdd.HadoopRDD
 
sample(boolean, double, long) - Static method in class org.apache.spark.rdd.JdbcRDD
 
sample(boolean, double, long) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
sample(boolean, double, long) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
sample(boolean, double, long) - Method in class org.apache.spark.rdd.RDD
Return a sampled subset of this RDD.
sample(boolean, double, long) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset by sampling a fraction of rows.
sample(boolean, double) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset by sampling a fraction of rows, using a random seed.
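A brief sketch of sampling with a fixed seed for reproducibility; rdd and ds are assumed to be an existing RDD and Dataset.

    val rddSample = rdd.sample(withReplacement = false, fraction = 0.1, seed = 42L)
    val dsSample  = ds.sample(withReplacement = false, fraction = 0.1, seed = 42L)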
sample() - Method in class org.apache.spark.util.random.BernoulliCellSampler
 
sample() - Method in class org.apache.spark.util.random.BernoulliSampler
 
sample() - Method in class org.apache.spark.util.random.PoissonSampler
 
sample(Iterator<T>) - Method in class org.apache.spark.util.random.PoissonSampler
 
sample(Iterator<T>) - Method in interface org.apache.spark.util.random.RandomSampler
Take a random sample.
sample() - Method in interface org.apache.spark.util.random.RandomSampler
Whether to sample the next item or not.
sample$default$3() - Static method in class org.apache.spark.api.r.RRDD
 
sample$default$3() - Static method in class org.apache.spark.graphx.EdgeRDD
 
sample$default$3() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
sample$default$3() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
sample$default$3() - Static method in class org.apache.spark.graphx.VertexRDD
 
sample$default$3() - Static method in class org.apache.spark.rdd.HadoopRDD
 
sample$default$3() - Static method in class org.apache.spark.rdd.JdbcRDD
 
sample$default$3() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
sample$default$3() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
sampleBy(String, Map<T, Object>, long) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Returns a stratified sample without replacement based on the fraction given on each stratum.
sampleBy(String, Map<T, Double>, long) - Method in class org.apache.spark.sql.DataFrameStatFunctions
Returns a stratified sample without replacement based on the fraction given on each stratum.
sampleByKey(boolean, Map<K, Double>, long) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a subset of this RDD sampled by key (via stratified sampling).
sampleByKey(boolean, Map<K, Double>) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a subset of this RDD sampled by key (via stratified sampling).
sampleByKey(boolean, Map<K, Object>, long) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return a subset of this RDD sampled by key (via stratified sampling).
sampleByKeyExact(boolean, Map<K, Double>, long) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a subset of this RDD sampled by key (via stratified sampling) containing exactly math.ceil(numItems * samplingRate) for each stratum (group of pairs with the same key).
sampleByKeyExact(boolean, Map<K, Double>) - Method in class org.apache.spark.api.java.JavaPairRDD
Return a subset of this RDD sampled by key (via stratified sampling) containing exactly math.ceil(numItems * samplingRate) for each stratum (group of pairs with the same key).
sampleByKeyExact(boolean, Map<K, Object>, long) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return a subset of this RDD sampled by key (via stratified sampling) containing exactly math.ceil(numItems * samplingRate) for each stratum (group of pairs with the same key).
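A hedged sketch of exact stratified sampling; pairRdd is an assumed RDD[(String, Int)].

    // Keep about half of key "a" and a tenth of key "b", with exact per-stratum sizes.
    val fractions = Map("a" -> 0.5, "b" -> 0.1)
    val sampled   = pairRdd.sampleByKeyExact(withReplacement = false, fractions = fractions, seed = 7L)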
sampleStdev() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Compute the sample standard deviation of this RDD's elements (which corrects for bias in estimating the standard deviation by dividing by N-1 instead of N).
sampleStdev() - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Compute the sample standard deviation of this RDD's elements (which corrects for bias in estimating the standard deviation by dividing by N-1 instead of N).
sampleStdev() - Method in class org.apache.spark.util.StatCounter
Return the sample standard deviation of the values, which corrects for bias in estimating the variance by dividing by N-1 instead of N.
sampleVariance() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Compute the sample variance of this RDD's elements (which corrects for bias in estimating the standard variance by dividing by N-1 instead of N).
sampleVariance() - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Compute the sample variance of this RDD's elements (which corrects for bias in estimating the variance by dividing by N-1 instead of N).
sampleVariance() - Method in class org.apache.spark.util.StatCounter
Return the sample variance, which corrects for bias in estimating the variance by dividing by N-1 instead of N.
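A small sketch using RDD.stats(), which returns a StatCounter carrying count, mean, and both sample statistics; sc is assumed to exist.

    val stats = sc.parallelize(Seq(1.0, 2.0, 3.0, 4.0)).stats()
    println(s"mean=${stats.mean} sampleStdev=${stats.sampleStdev} sampleVariance=${stats.sampleVariance}")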
SamplingUtils - Class in org.apache.spark.util.random
 
SamplingUtils() - Constructor for class org.apache.spark.util.random.SamplingUtils
 
save(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
save(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
save(String) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
save(String) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
save(String) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
save(String) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
save(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
save(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
save(String) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
save(String) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
save(String) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
save(String) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
save(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
save(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
save(String) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
save(String) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
save(String) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
save(String) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
save(String) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
save(String) - Static method in class org.apache.spark.ml.clustering.KMeans
 
save(String) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
save(String) - Static method in class org.apache.spark.ml.clustering.LDA
 
save(String) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
save(String) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
save(String) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
save(String) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
save(String) - Static method in class org.apache.spark.ml.feature.Binarizer
 
save(String) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
save(String) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
save(String) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
save(String) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
save(String) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
save(String) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
save(String) - Static method in class org.apache.spark.ml.feature.DCT
 
save(String) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
save(String) - Static method in class org.apache.spark.ml.feature.HashingTF
 
save(String) - Static method in class org.apache.spark.ml.feature.IDF
 
save(String) - Static method in class org.apache.spark.ml.feature.IDFModel
 
save(String) - Static method in class org.apache.spark.ml.feature.IndexToString
 
save(String) - Static method in class org.apache.spark.ml.feature.Interaction
 
save(String) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
save(String) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
save(String) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
save(String) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
save(String) - Static method in class org.apache.spark.ml.feature.NGram
 
save(String) - Static method in class org.apache.spark.ml.feature.Normalizer
 
save(String) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
save(String) - Static method in class org.apache.spark.ml.feature.PCA
 
save(String) - Static method in class org.apache.spark.ml.feature.PCAModel
 
save(String) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
save(String) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
save(String) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
save(String) - Static method in class org.apache.spark.ml.feature.RFormula
 
save(String) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
save(String) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
save(String) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
save(String) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
save(String) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
save(String) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
save(String) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
save(String) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
save(String) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
save(String) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
save(String) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
save(String) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
save(String) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
save(String) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
save(String) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
save(String) - Static method in class org.apache.spark.ml.Pipeline
 
save(String) - Static method in class org.apache.spark.ml.PipelineModel
 
save(String) - Static method in class org.apache.spark.ml.recommendation.ALS
 
save(String) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
save(String) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
save(String) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
save(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
save(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
save(String) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
save(String) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
save(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
save(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
save(String) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
save(String) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
save(String) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
save(String) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
save(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
save(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
save(String) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
save(String) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
save(String) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
save(String) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
save(String) - Method in interface org.apache.spark.ml.util.MLWritable
Saves this ML instance to the input path, a shortcut of write.save(path).
save(String) - Method in class org.apache.spark.ml.util.MLWriter
Saves the ML instances to the input path.
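A hedged sketch of persisting a fitted ML instance; pipelineModel and the output path are assumed.

    pipelineModel.save("/tmp/spark-pipeline-model")                   // shortcut for write.save(path)
    pipelineModel.write.overwrite().save("/tmp/spark-pipeline-model") // explicit writer, allows overwriting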
save(SparkContext, String, String, int, int, Vector, double, Option<Object>) - Method in class org.apache.spark.mllib.classification.impl.GLMClassificationModel.SaveLoadV1_0$
Helper method for saving GLM classification model metadata and data.
save(SparkContext, String) - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
 
save(SparkContext, String, org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0.Data) - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$
 
save(SparkContext, String, org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0.Data) - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.classification.SVMModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel
 
save(SparkContext, BisectingKMeansModel, String) - Method in class org.apache.spark.mllib.clustering.BisectingKMeansModel.SaveLoadV1_0$
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.clustering.KMeansModel
 
save(SparkContext, KMeansModel, String) - Method in class org.apache.spark.mllib.clustering.KMeansModel.SaveLoadV1_0$
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.clustering.PowerIterationClusteringModel
 
save(SparkContext, PowerIterationClusteringModel, String) - Method in class org.apache.spark.mllib.clustering.PowerIterationClusteringModel.SaveLoadV1_0$
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.feature.ChiSqSelectorModel
 
save(SparkContext, ChiSqSelectorModel, String) - Method in class org.apache.spark.mllib.feature.ChiSqSelectorModel.SaveLoadV1_0$
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.feature.Word2VecModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.fpm.FPGrowthModel
Save this model to the given path.
save(FPGrowthModel<?>, String) - Method in class org.apache.spark.mllib.fpm.FPGrowthModel.SaveLoadV1_0$
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.fpm.PrefixSpanModel
Save this model to the given path.
save(PrefixSpanModel<?>, String) - Method in class org.apache.spark.mllib.fpm.PrefixSpanModel.SaveLoadV1_0$
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
Save this model to the given path.
save(MatrixFactorizationModel, String) - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel.SaveLoadV1_0$
Saves a MatrixFactorizationModel, where user features are saved under data/users and product features are saved under data/products.
save(SparkContext, String, String, Vector, double) - Method in class org.apache.spark.mllib.regression.impl.GLMRegressionModel.SaveLoadV1_0$
Helper method for saving GLM regression model metadata and data.
save(SparkContext, String) - Method in class org.apache.spark.mllib.regression.IsotonicRegressionModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.regression.LassoModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
 
save(SparkContext, String, DecisionTreeModel) - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
save(SparkContext, String) - Method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
save(SparkContext, String) - Method in interface org.apache.spark.mllib.util.Saveable
Save this model to the given path.
save(String) - Method in class org.apache.spark.sql.DataFrameWriter
Saves the content of the DataFrame at the specified path.
save() - Method in class org.apache.spark.sql.DataFrameWriter
Saves the content of the DataFrame to the configured data source (for sources that do not require a path).
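A minimal sketch of the path-based save, assuming df exists and /tmp/out is writable.

    // Write df as Parquet, replacing any previous output at the path.
    df.write.format("parquet").mode("overwrite").save("/tmp/out/parquet")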
Saveable - Interface in org.apache.spark.mllib.util
:: DeveloperApi :: Trait for models and transformers which may be saved as files.
saveAsHadoopDataset(JobConf) - Method in class org.apache.spark.api.java.JavaPairRDD
Output the RDD to any Hadoop-supported storage system, using a Hadoop JobConf object for that storage system.
saveAsHadoopDataset(JobConf) - Method in class org.apache.spark.rdd.PairRDDFunctions
Output the RDD to any Hadoop-supported storage system, using a Hadoop JobConf object for that storage system.
saveAsHadoopFile(String, Class<?>, Class<?>, Class<F>, JobConf) - Method in class org.apache.spark.api.java.JavaPairRDD
Output the RDD to any Hadoop-supported file system.
saveAsHadoopFile(String, Class<?>, Class<?>, Class<F>) - Method in class org.apache.spark.api.java.JavaPairRDD
Output the RDD to any Hadoop-supported file system.
saveAsHadoopFile(String, Class<?>, Class<?>, Class<F>, Class<? extends CompressionCodec>) - Method in class org.apache.spark.api.java.JavaPairRDD
Output the RDD to any Hadoop-supported file system, compressing with the supplied codec.
saveAsHadoopFile(String, ClassTag<F>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Output the RDD to any Hadoop-supported file system, using a Hadoop OutputFormat class supporting the key and value types K and V in this RDD.
saveAsHadoopFile(String, Class<? extends CompressionCodec>, ClassTag<F>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Output the RDD to any Hadoop-supported file system, using a Hadoop OutputFormat class supporting the key and value types K and V in this RDD.
saveAsHadoopFile(String, Class<?>, Class<?>, Class<? extends OutputFormat<?, ?>>, Class<? extends CompressionCodec>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Output the RDD to any Hadoop-supported file system, using a Hadoop OutputFormat class supporting the key and value types K and V in this RDD.
saveAsHadoopFile(String, Class<?>, Class<?>, Class<? extends OutputFormat<?, ?>>, JobConf, Option<Class<? extends CompressionCodec>>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Output the RDD to any Hadoop-supported file system, using a Hadoop OutputFormat class supporting the key and value types K and V in this RDD.
saveAsHadoopFiles(String, String) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Save each RDD in this DStream as a Hadoop file.
saveAsHadoopFiles(String, String, Class<?>, Class<?>, Class<F>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Save each RDD in this DStream as a Hadoop file.
saveAsHadoopFiles(String, String, Class<?>, Class<?>, Class<F>, JobConf) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Save each RDD in this DStream as a Hadoop file.
saveAsHadoopFiles(String, String) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
saveAsHadoopFiles(String, String, Class<?>, Class<?>, Class<F>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
saveAsHadoopFiles(String, String, Class<?>, Class<?>, Class<F>, JobConf) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
saveAsHadoopFiles(String, String) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
saveAsHadoopFiles(String, String, Class<?>, Class<?>, Class<F>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
saveAsHadoopFiles(String, String, Class<?>, Class<?>, Class<F>, JobConf) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
saveAsHadoopFiles(String, String, ClassTag<F>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Save each RDD in this DStream as a Hadoop file.
saveAsHadoopFiles(String, String, Class<?>, Class<?>, Class<? extends OutputFormat<?, ?>>, JobConf) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Save each RDD in this DStream as a Hadoop file.
saveAsLibSVMFile(RDD<LabeledPoint>, String) - Static method in class org.apache.spark.mllib.util.MLUtils
Save labeled data in LIBSVM format.
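A short sketch, assuming labeled is an existing RDD[LabeledPoint].

    import org.apache.spark.mllib.util.MLUtils
    MLUtils.saveAsLibSVMFile(labeled, "/tmp/data-libsvm")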
saveAsNewAPIHadoopDataset(Configuration) - Method in class org.apache.spark.api.java.JavaPairRDD
Output the RDD to any Hadoop-supported storage system, using a Configuration object for that storage system.
saveAsNewAPIHadoopDataset(Configuration) - Method in class org.apache.spark.rdd.PairRDDFunctions
Output the RDD to any Hadoop-supported storage system with new Hadoop API, using a Hadoop Configuration object for that storage system.
saveAsNewAPIHadoopFile(String, Class<?>, Class<?>, Class<F>, Configuration) - Method in class org.apache.spark.api.java.JavaPairRDD
Output the RDD to any Hadoop-supported file system.
saveAsNewAPIHadoopFile(String, Class<?>, Class<?>, Class<F>) - Method in class org.apache.spark.api.java.JavaPairRDD
Output the RDD to any Hadoop-supported file system.
saveAsNewAPIHadoopFile(String, ClassTag<F>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Output the RDD to any Hadoop-supported file system, using a new Hadoop API OutputFormat (mapreduce.OutputFormat) object supporting the key and value types K and V in this RDD.
saveAsNewAPIHadoopFile(String, Class<?>, Class<?>, Class<? extends OutputFormat<?, ?>>, Configuration) - Method in class org.apache.spark.rdd.PairRDDFunctions
Output the RDD to any Hadoop-supported file system, using a new Hadoop API OutputFormat (mapreduce.OutputFormat) object supporting the key and value types K and V in this RDD.
saveAsNewAPIHadoopFiles(String, String) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Save each RDD in this DStream as a Hadoop file.
saveAsNewAPIHadoopFiles(String, String, Class<?>, Class<?>, Class<F>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Save each RDD in this DStream as a Hadoop file.
saveAsNewAPIHadoopFiles(String, String, Class<?>, Class<?>, Class<F>, Configuration) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Save each RDD in this DStream as a Hadoop file.
saveAsNewAPIHadoopFiles(String, String) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
saveAsNewAPIHadoopFiles(String, String, Class<?>, Class<?>, Class<F>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
saveAsNewAPIHadoopFiles(String, String, Class<?>, Class<?>, Class<F>, Configuration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
saveAsNewAPIHadoopFiles(String, String) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
saveAsNewAPIHadoopFiles(String, String, Class<?>, Class<?>, Class<F>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
saveAsNewAPIHadoopFiles(String, String, Class<?>, Class<?>, Class<F>, Configuration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
saveAsNewAPIHadoopFiles(String, String, ClassTag<F>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Save each RDD in this DStream as a Hadoop file.
saveAsNewAPIHadoopFiles(String, String, Class<?>, Class<?>, Class<? extends OutputFormat<?, ?>>, Configuration) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Save each RDD in this DStream as a Hadoop file.
saveAsNewAPIHadoopFiles$default$6() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
saveAsNewAPIHadoopFiles$default$6() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
saveAsObjectFile(String) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
saveAsObjectFile(String) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
saveAsObjectFile(String) - Static method in class org.apache.spark.api.java.JavaRDD
 
saveAsObjectFile(String) - Method in interface org.apache.spark.api.java.JavaRDDLike
Save this RDD as a SequenceFile of serialized objects.
saveAsObjectFile(String) - Static method in class org.apache.spark.api.r.RRDD
 
saveAsObjectFile(String) - Static method in class org.apache.spark.graphx.EdgeRDD
 
saveAsObjectFile(String) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
saveAsObjectFile(String) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
saveAsObjectFile(String) - Static method in class org.apache.spark.graphx.VertexRDD
 
saveAsObjectFile(String) - Static method in class org.apache.spark.rdd.HadoopRDD
 
saveAsObjectFile(String) - Static method in class org.apache.spark.rdd.JdbcRDD
 
saveAsObjectFile(String) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
saveAsObjectFile(String) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
saveAsObjectFile(String) - Method in class org.apache.spark.rdd.RDD
Save this RDD as a SequenceFile of serialized objects.
saveAsObjectFiles(String, String) - Method in class org.apache.spark.streaming.dstream.DStream
Save each RDD in this DStream as a Sequence file of serialized objects.
saveAsSequenceFile(String, Option<Class<? extends CompressionCodec>>) - Method in class org.apache.spark.rdd.SequenceFileRDDFunctions
Output the RDD as a Hadoop SequenceFile using the Writable types we infer from the RDD's key and value types.
saveAsTable(String) - Method in class org.apache.spark.sql.DataFrameWriter
Saves the content of the DataFrame as the specified table.
saveAsTextFile(String) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
saveAsTextFile(String) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
saveAsTextFile(String) - Static method in class org.apache.spark.api.java.JavaRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.api.java.JavaRDD
 
saveAsTextFile(String) - Method in interface org.apache.spark.api.java.JavaRDDLike
Save this RDD as a text file, using string representations of elements.
saveAsTextFile(String, Class<? extends CompressionCodec>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Save this RDD as a compressed text file, using string representations of elements.
saveAsTextFile(String) - Static method in class org.apache.spark.api.r.RRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.api.r.RRDD
 
saveAsTextFile(String) - Static method in class org.apache.spark.graphx.EdgeRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
saveAsTextFile(String) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
saveAsTextFile(String) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
saveAsTextFile(String) - Static method in class org.apache.spark.graphx.VertexRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.graphx.VertexRDD
 
saveAsTextFile(String) - Static method in class org.apache.spark.rdd.HadoopRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
saveAsTextFile(String) - Static method in class org.apache.spark.rdd.JdbcRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
saveAsTextFile(String) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
saveAsTextFile(String) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
saveAsTextFile(String, Class<? extends CompressionCodec>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
saveAsTextFile(String) - Method in class org.apache.spark.rdd.RDD
Save this RDD as a text file, using string representations of elements.
saveAsTextFile(String, Class<? extends CompressionCodec>) - Method in class org.apache.spark.rdd.RDD
Save this RDD as a compressed text file, using string representations of elements.
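For example (assuming an existing SparkContext sc; output paths are illustrative), the plain and compressed variants:

    import org.apache.hadoop.io.compress.GzipCodec

    val lines = sc.parallelize(Seq("a", "b", "c"))
    lines.saveAsTextFile("/tmp/plain-out")                      // one part file per partition
    lines.saveAsTextFile("/tmp/gzip-out", classOf[GzipCodec])   // gzip-compressed part files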
saveAsTextFiles(String, String) - Method in class org.apache.spark.streaming.dstream.DStream
Save each RDD in this DStream as a text file, using string representations of elements.
saveImpl(Params, PipelineStage[], SparkContext, String) - Method in class org.apache.spark.ml.Pipeline.SharedReadWrite$
Save metadata and stages for a Pipeline or PipelineModel: metadata is saved to path/metadata and each stage to stages/IDX_UID.
saveImpl(M, String, SQLContext, JsonAST.JObject) - Static method in class org.apache.spark.ml.tree.EnsembleModelReadWrite
Helper method for saving a tree ensemble to disk.
saveImpl(String) - Method in class org.apache.spark.ml.util.MLWriter
save() handles overwriting and then calls this method.
saveMode(String) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
SaveMode - Enum in org.apache.spark.sql
SaveMode is used to specify the expected behavior of saving a DataFrame to a data source.
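For example, a sketch of combining SaveMode with DataFrameWriter.saveAsTable (assuming an existing DataFrame df; the table name is illustrative):

    import org.apache.spark.sql.SaveMode

    df.write.mode(SaveMode.Overwrite).saveAsTable("people_backup")
    // the string form is equivalent: df.write.mode("overwrite").saveAsTable("people_backup")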
sc() - Method in class org.apache.spark.api.java.JavaSparkContext
 
sc() - Static method in class org.apache.spark.ml.r.RWrappers
 
sc() - Method in class org.apache.spark.sql.SQLImplicits.StringToColumn
 
scal(double, Vector) - Static method in class org.apache.spark.ml.linalg.BLAS
x = a * x
scal(double, Vector) - Static method in class org.apache.spark.mllib.linalg.BLAS
x = a * x
scalaBoolean() - Static method in class org.apache.spark.sql.Encoders
An encoder for Scala's primitive boolean type.
scalaByte() - Static method in class org.apache.spark.sql.Encoders
An encoder for Scala's primitive byte type.
scalaDouble() - Static method in class org.apache.spark.sql.Encoders
An encoder for Scala's primitive double type.
scalaFloat() - Static method in class org.apache.spark.sql.Encoders
An encoder for Scala's primitive float type.
scalaInt() - Static method in class org.apache.spark.sql.Encoders
An encoder for Scala's primitive int type.
scalaIntToJavaLong(DStream<Object>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
scalaIntToJavaLong(DStream<Object>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
 
scalaIntToJavaLong(DStream<Object>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
scalaIntToJavaLong(DStream<Object>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
scalaIntToJavaLong(DStream<Object>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
scalaIntToJavaLong(DStream<Object>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
scalaIntToJavaLong(DStream<Object>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
scalaLong() - Static method in class org.apache.spark.sql.Encoders
An encoder for Scala's primitive long type.
scalaShort() - Static method in class org.apache.spark.sql.Encoders
An encoder for Scala's primitive short type.
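For example, a sketch of using one of these encoders explicitly (assuming an existing SparkSession named spark); in Scala code the same encoder is usually supplied implicitly via spark.implicits._:

    import org.apache.spark.sql.Encoders

    val ds = spark.createDataset(Seq(1, 2, 3))(Encoders.scalaInt)   // Dataset[Int]
    val total = ds.reduce(_ + _)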
scalaToJavaLong(JavaPairDStream<K, Object>, ClassTag<K>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
scale() - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
scale() - Method in class org.apache.spark.mllib.random.GammaGenerator
 
scale() - Method in class org.apache.spark.sql.types.Decimal
 
scale() - Method in class org.apache.spark.sql.types.DecimalType
 
scalingVec() - Method in class org.apache.spark.ml.feature.ElementwiseProduct
The vector to multiply with input vectors.
scalingVec() - Method in class org.apache.spark.mllib.feature.ElementwiseProduct
 
scan(B, Function2<B, B, B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
scanLeft(B, Function2<B, A, B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
scanRight(B, Function2<A, B, B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
SCHEDULED() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
SCHEDULER_DELAY() - Static method in class org.apache.spark.ui.jobs.TaskDetailsClassNames
 
SCHEDULER_DELAY() - Static method in class org.apache.spark.ui.ToolTips
 
schedulingDelay() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
Time taken for the first job of this batch to start processing from the time this batch was submitted to the streaming scheduler.
SchedulingMode - Class in org.apache.spark.scheduler
"FAIR" and "FIFO" determines which policy is used to order tasks amongst a Schedulable's sub-queues "NONE" is used when the a Schedulable has no sub-queues.
SchedulingMode() - Constructor for class org.apache.spark.scheduler.SchedulingMode
 
schedulingMode() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
schedulingPool() - Method in class org.apache.spark.status.api.v1.StageData
 
schedulingPool() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
schema(StructType) - Method in class org.apache.spark.sql.DataFrameReader
Specifies the input schema.
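For example, a sketch of supplying an explicit schema when reading JSON (assuming an existing SparkSession named spark; the path is illustrative):

    import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

    val peopleSchema = StructType(Seq(
      StructField("name", StringType, nullable = true),
      StructField("age", IntegerType, nullable = true)))
    val people = spark.read.schema(peopleSchema).json("/tmp/people.json")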
schema() - Method in class org.apache.spark.sql.Dataset
Returns the schema of this Dataset.
schema() - Method in interface org.apache.spark.sql.Encoder
Returns the schema of encoding this type of object as a Row.
schema() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
schema() - Method in interface org.apache.spark.sql.Row
Schema for the row.
schema() - Method in class org.apache.spark.sql.sources.BaseRelation
 
SchemaRelationProvider - Interface in org.apache.spark.sql.sources
:: DeveloperApi :: Implemented by objects that produce relations for a specific kind of data source with a given schema.
schemaString() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
SchemaUtils - Class in org.apache.spark.ml.util
Utils for handling schemas.
SchemaUtils() - Constructor for class org.apache.spark.ml.util.SchemaUtils
 
scope() - Method in class org.apache.spark.storage.RDDInfo
 
scoreAndLabels() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
 
scratch() - Method in class org.apache.spark.mllib.optimization.NNLS.Workspace
 
ScriptTransformationWriterThread - Class in org.apache.spark.sql.hive.execution
 
ScriptTransformationWriterThread(Iterator<InternalRow>, Seq<DataType>, org.apache.spark.sql.catalyst.expressions.Projection, AbstractSerDe, ObjectInspector, HiveScriptIOSchema, OutputStream, Process, org.apache.spark.util.CircularBuffer, TaskContext, Configuration) - Constructor for class org.apache.spark.sql.hive.execution.ScriptTransformationWriterThread
 
second(Column) - Static method in class org.apache.spark.sql.functions
Extracts the seconds as an integer from a given date/timestamp/string.
seconds() - Static method in class org.apache.spark.scheduler.StatsReportListener
 
seconds(long) - Static method in class org.apache.spark.streaming.Durations
 
Seconds - Class in org.apache.spark.streaming
Helper object that creates an instance of Duration representing a given number of seconds.
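For example, a minimal sketch of using Seconds to set a streaming batch interval (the interval value is illustrative):

    import org.apache.spark.SparkConf
    import org.apache.spark.streaming.{Seconds, StreamingContext}

    val conf = new SparkConf().setAppName("streaming-demo").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(10))   // 10-second batches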
Seconds() - Constructor for class org.apache.spark.streaming.Seconds
 
securityManager() - Method in class org.apache.spark.SparkEnv
 
seed() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
seed() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
seed() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
seed() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
seed() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
seed() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
seed() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
seed() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
seed() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
seed() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
seed() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
seed() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
seed() - Static method in class org.apache.spark.ml.clustering.KMeans
 
seed() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
seed() - Static method in class org.apache.spark.ml.clustering.LDA
 
seed() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
seed() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
seed() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
seed() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
seed() - Static method in class org.apache.spark.ml.recommendation.ALS
 
seed() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
seed() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
seed() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
seed() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
seed() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
seed() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
seed() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
seed() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
seed() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
seed() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
seedBrokers() - Method in class org.apache.spark.streaming.kafka.KafkaCluster.SimpleConsumerConfig
 
segmentLength(Function1<A, Object>, int) - Static method in class org.apache.spark.sql.types.StructType
 
select(Column...) - Method in class org.apache.spark.sql.Dataset
Selects a set of column-based expressions.
select(String, String...) - Method in class org.apache.spark.sql.Dataset
Selects a set of columns.
select(Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Selects a set of column-based expressions.
select(String, Seq<String>) - Method in class org.apache.spark.sql.Dataset
Selects a set of columns.
select(TypedColumn<T, U1>, Encoder<U1>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: Returns a new Dataset by computing the given Column expression for each element.
select(TypedColumn<T, U1>, TypedColumn<T, U2>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: Returns a new Dataset by computing the given Column expressions for each element.
select(TypedColumn<T, U1>, TypedColumn<T, U2>, TypedColumn<T, U3>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: Returns a new Dataset by computing the given Column expressions for each element.
select(TypedColumn<T, U1>, TypedColumn<T, U2>, TypedColumn<T, U3>, TypedColumn<T, U4>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: Returns a new Dataset by computing the given Column expressions for each element.
select(TypedColumn<T, U1>, TypedColumn<T, U2>, TypedColumn<T, U3>, TypedColumn<T, U4>, TypedColumn<T, U5>) - Method in class org.apache.spark.sql.Dataset
:: Experimental :: Returns a new Dataset by computing the given Column expressions for each element.
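For example, a sketch of the untyped and typed select variants (assuming a SparkSession in a val named spark and a DataFrame df with columns "name" and "age"):

    import spark.implicits._
    import org.apache.spark.sql.functions.col

    val byName = df.select("name", "age")                        // untyped, by column name
    val older  = df.select(col("age") + 1)                       // untyped, column expression
    val typed  = df.select($"name".as[String], $"age".as[Int])   // typed: Dataset[(String, Int)]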
selectedFeatures() - Method in class org.apache.spark.ml.feature.ChiSqSelectorModel
List of indices to select (filter).
selectedFeatures() - Method in class org.apache.spark.mllib.feature.ChiSqSelectorModel
 
selectExpr(String...) - Method in class org.apache.spark.sql.Dataset
Selects a set of SQL expressions.
selectExpr(Seq<String>) - Method in class org.apache.spark.sql.Dataset
Selects a set of SQL expressions.
selectUntyped(Seq<TypedColumn<?, ?>>) - Method in class org.apache.spark.sql.Dataset
Internal helper function for building typed selects that return tuples.
sender() - Method in class org.apache.spark.storage.BlockManagerMessages.RegisterBlockManager
 
sendToDst(A) - Method in class org.apache.spark.graphx.EdgeContext
Sends a message to the destination vertex.
sendToDst(A) - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
sendToSrc(A) - Method in class org.apache.spark.graphx.EdgeContext
Sends a message to the source vertex.
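For example, a sketch of using sendToSrc/sendToDst inside aggregateMessages to count vertex degrees (assuming a Graph in scope named graph):

    val degrees = graph.aggregateMessages[Int](
      ctx => { ctx.sendToSrc(1); ctx.sendToDst(1) },   // send a message along each edge
      _ + _)                                           // merge messages per vertex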
sendToSrc(A) - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
seq() - Static method in class org.apache.spark.sql.types.StructType
 
seqToString(Seq<T>, Function1<T, String>) - Static method in class org.apache.spark.internal.config.ConfigHelpers
 
sequence() - Method in class org.apache.spark.mllib.fpm.PrefixSpan.FreqSequence
 
sequenceFile(String, Class<K>, Class<V>, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Get an RDD for a Hadoop SequenceFile with given key and value types.
sequenceFile(String, Class<K>, Class<V>) - Method in class org.apache.spark.api.java.JavaSparkContext
Get an RDD for a Hadoop SequenceFile.
sequenceFile(String, Class<K>, Class<V>, int) - Method in class org.apache.spark.SparkContext
Get an RDD for a Hadoop SequenceFile with given key and value types.
sequenceFile(String, Class<K>, Class<V>) - Method in class org.apache.spark.SparkContext
Get an RDD for a Hadoop SequenceFile with given key and value types.
sequenceFile(String, int, ClassTag<K>, ClassTag<V>, Function0<WritableConverter<K>>, Function0<WritableConverter<V>>) - Method in class org.apache.spark.SparkContext
Version of sequenceFile() for types implicitly convertible to Writables through a WritableConverter.
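For example, a sketch of reading a SequenceFile of (Text, IntWritable) pairs (assuming an existing SparkContext sc; the path is illustrative):

    import org.apache.hadoop.io.{IntWritable, Text}

    val counts = sc.sequenceFile("/tmp/seqfile", classOf[Text], classOf[IntWritable])
      .map { case (k, v) => (k.toString, v.get) }   // copy out of the reused Writable objects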
SequenceFileRDDFunctions<K,V> - Class in org.apache.spark.rdd
Extra functions available on RDDs of (key, value) pairs to create a Hadoop SequenceFile, through an implicit conversion.
SequenceFileRDDFunctions(RDD<Tuple2<K, V>>, Class<? extends Writable>, Class<? extends Writable>, Function1<K, Writable>, ClassTag<K>, Function1<V, Writable>, ClassTag<V>) - Constructor for class org.apache.spark.rdd.SequenceFileRDDFunctions
 
SerDe - Class in org.apache.spark.api.r
Utility functions to serialize and deserialize objects to/from R.
SerDe() - Constructor for class org.apache.spark.api.r.SerDe
 
serde() - Method in class org.apache.spark.sql.internal.HiveSerDe
 
SerializableWritable<T extends org.apache.hadoop.io.Writable> - Class in org.apache.spark
 
SerializableWritable(T) - Constructor for class org.apache.spark.SerializableWritable
 
SerializationDebugger - Class in org.apache.spark.serializer
 
SerializationDebugger() - Constructor for class org.apache.spark.serializer.SerializationDebugger
 
SerializationDebugger.ObjectStreamClassMethods - Class in org.apache.spark.serializer
An implicit class that allows us to call private methods of ObjectStreamClass.
SerializationDebugger.ObjectStreamClassMethods(ObjectStreamClass) - Constructor for class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods
 
SerializationDebugger.ObjectStreamClassMethods$ - Class in org.apache.spark.serializer
 
SerializationDebugger.ObjectStreamClassMethods$() - Constructor for class org.apache.spark.serializer.SerializationDebugger.ObjectStreamClassMethods$
 
SerializationFormats - Class in org.apache.spark.api.r
 
SerializationFormats() - Constructor for class org.apache.spark.api.r.SerializationFormats
 
SerializationStream - Class in org.apache.spark.serializer
:: DeveloperApi :: A stream for writing serialized objects.
SerializationStream() - Constructor for class org.apache.spark.serializer.SerializationStream
 
serialize(Vector) - Method in class org.apache.spark.mllib.linalg.VectorUDT
 
serialize(T, ClassTag<T>) - Method in class org.apache.spark.serializer.DummySerializerInstance
 
serialize(T, ClassTag<T>) - Method in class org.apache.spark.serializer.SerializerInstance
 
serialize(T) - Static method in class org.apache.spark.util.Utils
Serialize an object using Java serialization.
SERIALIZED_R_DATA_SCHEMA() - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
serializedData() - Method in class org.apache.spark.scheduler.local.StatusUpdate
 
SerializedMemoryEntry<T> - Class in org.apache.spark.storage.memory
 
SerializedMemoryEntry(org.apache.spark.util.io.ChunkedByteBuffer, MemoryMode, ClassTag<T>) - Constructor for class org.apache.spark.storage.memory.SerializedMemoryEntry
 
Serializer - Class in org.apache.spark.serializer
:: DeveloperApi :: A serializer.
Serializer() - Constructor for class org.apache.spark.serializer.Serializer
 
serializer() - Method in class org.apache.spark.ShuffleDependency
 
serializer() - Method in class org.apache.spark.SparkEnv
 
SerializerInstance - Class in org.apache.spark.serializer
:: DeveloperApi :: An instance of a serializer, for use by one thread at a time.
SerializerInstance() - Constructor for class org.apache.spark.serializer.SerializerInstance
 
serializerManager() - Method in class org.apache.spark.SparkEnv
 
serializeStream(OutputStream) - Method in class org.apache.spark.serializer.DummySerializerInstance
 
serializeStream(OutputStream) - Method in class org.apache.spark.serializer.SerializerInstance
 
serializeViaNestedStream(OutputStream, SerializerInstance, Function1<SerializationStream, BoxedUnit>) - Static method in class org.apache.spark.util.Utils
Serialize via a nested stream using a specific serializer.
sessionState() - Method in class org.apache.spark.sql.hive.HiveContext
Deprecated.
 
sessionState() - Method in class org.apache.spark.sql.SparkSession
State isolated across sessions, including SQL configurations, temporary tables, registered functions, and everything else that accepts a SQLConf.
sessionState() - Method in class org.apache.spark.sql.SQLContext
 
set(Edge<ED>) - Method in class org.apache.spark.graphx.EdgeTriplet
Set the edge properties of this triplet.
set(long, long, int, int, VD, VD, ED) - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
set(String, Object) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
set(String, Object) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
set(String, Object) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
set(String, Object) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
set(String, Object) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
set(String, Object) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.KMeans
 
set(String, Object) - Static method in class org.apache.spark.ml.clustering.KMeans
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
set(String, Object) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.LDA
 
set(String, Object) - Static method in class org.apache.spark.ml.clustering.LDA
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.clustering.LDA
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
set(String, Object) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
set(String, Object) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
set(String, Object) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
set(String, Object) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Binarizer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.Binarizer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.DCT
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.DCT
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.DCT
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.HashingTF
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.HashingTF
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.IDF
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.IDF
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.IDF
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.IDFModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.IDFModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.IndexToString
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.IndexToString
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Interaction
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.Interaction
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.Interaction
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.NGram
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.NGram
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.NGram
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Normalizer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.Normalizer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.PCA
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.PCA
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.PCA
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.PCAModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.PCAModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.RFormula
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.RFormula
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.RFormula
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
set(String, Object) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
set(Param<T>, T) - Method in interface org.apache.spark.ml.param.Params
Sets a parameter in the embedded param map.
set(String, Object) - Method in interface org.apache.spark.ml.param.Params
Sets a parameter (by name) in the embedded param map.
set(ParamPair<?>) - Method in interface org.apache.spark.ml.param.Params
Sets a parameter in the embedded param map.
set(Param<T>, T) - Static method in class org.apache.spark.ml.Pipeline
 
set(String, Object) - Static method in class org.apache.spark.ml.Pipeline
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.Pipeline
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.PipelineModel
 
set(String, Object) - Static method in class org.apache.spark.ml.PipelineModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.PipelineModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.recommendation.ALS
 
set(String, Object) - Static method in class org.apache.spark.ml.recommendation.ALS
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
set(String, Object) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
set(String, Object) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
set(String, Object) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
set(String, Object) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
set(String, Object) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
set(Param<T>, T) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
set(String, Object) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
set(ParamPair<?>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
set(String, String) - Method in class org.apache.spark.SparkConf
Set a configuration variable.
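For example, a minimal sketch of building a SparkConf (the values are illustrative; the keys are standard Spark configuration properties):

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
      .setAppName("my-app")
      .setMaster("local[2]")
      .set("spark.executor.memory", "1g")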
set(SparkEnv) - Static method in class org.apache.spark.SparkEnv
 
set(String, String) - Method in class org.apache.spark.sql.RuntimeConfig
Sets the given Spark runtime configuration property.
set(String, boolean) - Method in class org.apache.spark.sql.RuntimeConfig
Sets the given Spark runtime configuration property.
set(String, long) - Method in class org.apache.spark.sql.RuntimeConfig
Sets the given Spark runtime configuration property.
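For example (assuming an existing SparkSession named spark, whose conf field is the RuntimeConfig):

    spark.conf.set("spark.sql.shuffle.partitions", 8L)     // long overload
    spark.conf.set("spark.sql.crossJoin.enabled", true)    // boolean overload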
set(long) - Method in class org.apache.spark.sql.types.Decimal
Set this Decimal to the given Long.
set(int) - Method in class org.apache.spark.sql.types.Decimal
Set this Decimal to the given Int.
set(long, int, int) - Method in class org.apache.spark.sql.types.Decimal
Set this Decimal to the given unscaled Long, with a given precision and scale.
set(BigDecimal, int, int) - Method in class org.apache.spark.sql.types.Decimal
Set this Decimal to the given BigDecimal value, with a given precision and scale.
set(BigDecimal) - Method in class org.apache.spark.sql.types.Decimal
Set this Decimal to the given BigDecimal value, inheriting its precision and scale.
set(Decimal) - Method in class org.apache.spark.sql.types.Decimal
Set this Decimal to the given Decimal value.
setAcceptsNull(boolean) - Static method in class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
setActive(SQLContext) - Static method in class org.apache.spark.sql.SQLContext
Changes the SQLContext that will be returned in this thread and its children when SQLContext.getOrCreate() is called.
setAggregator(Aggregator<K, V, C>) - Method in class org.apache.spark.rdd.ShuffledRDD
Set aggregator for RDD's shuffle.
setAlgo(String) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
Sets Algorithm using a String.
setAll(Traversable<Tuple2<String, String>>) - Method in class org.apache.spark.SparkConf
Set multiple parameters together.
setAlpha(double) - Method in class org.apache.spark.ml.recommendation.ALS
 
setAlpha(Vector) - Method in class org.apache.spark.mllib.clustering.LDA
Alias for setDocConcentration()
setAlpha(double) - Method in class org.apache.spark.mllib.clustering.LDA
Alias for setDocConcentration()
setAlpha(double) - Method in class org.apache.spark.mllib.recommendation.ALS
Sets the constant used in computing confidence in implicit ALS.
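For example, a sketch of implicit-feedback ALS where alpha scales the confidence attached to observed interactions (assuming an RDD[Rating] named ratings; all values are illustrative):

    import org.apache.spark.mllib.recommendation.ALS

    val model = new ALS()
      .setRank(10)
      .setIterations(10)
      .setImplicitPrefs(true)
      .setAlpha(0.01)
      .run(ratings)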
setAppName(String) - Method in class org.apache.spark.launcher.SparkLauncher
Set the application name.
setAppName(String) - Method in class org.apache.spark.SparkConf
Set a name for your application.
setAppResource(String) - Method in class org.apache.spark.launcher.SparkLauncher
Set the main application resource.
setBandwidth(double) - Method in class org.apache.spark.mllib.stat.KernelDensity
Sets the bandwidth (standard deviation) of the Gaussian kernel (default: 1.0).
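For example (assuming an RDD[Double] named samples; the bandwidth and evaluation points are illustrative):

    import org.apache.spark.mllib.stat.KernelDensity

    val kd = new KernelDensity()
      .setSample(samples)
      .setBandwidth(3.0)
    val densities = kd.estimate(Array(-1.0, 2.0, 5.0))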
setBeta(double) - Method in class org.apache.spark.mllib.clustering.LDA
Alias for setTopicConcentration()
setBinary(boolean) - Method in class org.apache.spark.ml.feature.CountVectorizer
 
setBinary(boolean) - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
setBinary(boolean) - Method in class org.apache.spark.ml.feature.HashingTF
 
setBinary(boolean) - Method in class org.apache.spark.mllib.feature.HashingTF
If true, the term frequency vector will be binary, i.e. non-zero term counts are set to 1 (default: false).
setBlocks(int) - Method in class org.apache.spark.mllib.recommendation.ALS
Set the number of blocks for both user blocks and product blocks to parallelize the computation into; pass -1 for an auto-configured number of blocks.
setBlockSize(int) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
setCacheNodeIds(boolean) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setCacheNodeIds(boolean) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setCacheNodeIds(boolean) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setCacheNodeIds(boolean) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setCacheNodeIds(boolean) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setCacheNodeIds(boolean) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setCacheNodeIds(boolean) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setCacheNodeIds(boolean) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setCacheNodeIds(boolean) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setCacheNodeIds(boolean) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setCacheNodeIds(boolean) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setCacheNodeIds(boolean) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setCallSite(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Pass-through to SparkContext.setCallSite.
setCallSite(String) - Method in class org.apache.spark.SparkContext
Set the thread-local property for overriding the call sites of actions and RDDs.
setCaseSensitive(boolean) - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
setCategoricalFeaturesInfo(Map<Integer, Integer>) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
Sets categoricalFeaturesInfo using a Java Map.
setCensorCol(String) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
setCheckpointDir(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Set the directory under which RDDs are going to be checkpointed.
setCheckpointDir(String) - Method in class org.apache.spark.SparkContext
Set the directory under which RDDs are going to be checkpointed.
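For example (assuming an existing SparkContext sc; the directory is illustrative):

    sc.setCheckpointDir("/tmp/spark-checkpoints")
    val doubled = sc.parallelize(1 to 1000).map(_ * 2)
    doubled.checkpoint()   // marked now, materialized on the next action
    doubled.count()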
setCheckpointInterval(int) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setCheckpointInterval(int) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setCheckpointInterval(int) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setCheckpointInterval(int) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setCheckpointInterval(int) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setCheckpointInterval(int) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setCheckpointInterval(int) - Method in class org.apache.spark.ml.clustering.LDA
 
setCheckpointInterval(int) - Method in class org.apache.spark.ml.recommendation.ALS
 
setCheckpointInterval(int) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setCheckpointInterval(int) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setCheckpointInterval(int) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setCheckpointInterval(int) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setCheckpointInterval(int) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setCheckpointInterval(int) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setCheckpointInterval(int) - Method in class org.apache.spark.mllib.clustering.LDA
Set the checkpoint interval (>= 1), or disable checkpointing (-1).
setCheckpointInterval(int) - Method in class org.apache.spark.mllib.recommendation.ALS
Set period (in iterations) between checkpoints (default = 10).
setCheckpointInterval(int) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setClassifier(Classifier<?, ?, ?>) - Method in class org.apache.spark.ml.classification.OneVsRest
 
setConf(String, String) - Method in class org.apache.spark.launcher.SparkLauncher
Set a single configuration value for the application.
setConf(Properties) - Method in class org.apache.spark.sql.SQLContext
Set Spark SQL configuration properties.
setConf(String, String) - Method in class org.apache.spark.sql.SQLContext
Set the given Spark SQL configuration property.
setConfig(String, String) - Static method in class org.apache.spark.launcher.SparkLauncher
Set a configuration value for the launcher library.
setConsumerOffsetMetadata(String, Map<TopicAndPartition, OffsetAndMetadata>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
Requires Kafka >= 0.8.1.1.
setConsumerOffsetMetadata(String, Map<TopicAndPartition, OffsetAndMetadata>, short) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
setConsumerOffsets(String, Map<TopicAndPartition, Object>) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
Requires Kafka >= 0.8.1.1.
setConsumerOffsets(String, Map<TopicAndPartition, Object>, short) - Method in class org.apache.spark.streaming.kafka.KafkaCluster
 
setConvergenceTol(double) - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Set the largest change in log-likelihood at which convergence is considered to have occurred.
setConvergenceTol(double) - Method in class org.apache.spark.mllib.optimization.GradientDescent
Set the convergence tolerance.
setConvergenceTol(double) - Method in class org.apache.spark.mllib.optimization.LBFGS
Set the convergence tolerance of iterations for L-BFGS.
setConvergenceTol(double) - Method in class org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
Set the convergence tolerance.
setCurrentDatabase(String) - Method in class org.apache.spark.sql.catalog.Catalog
Sets the current default database in this session.
setCurrentDatabase(String) - Method in class org.apache.spark.sql.internal.CatalogImpl
Sets the current default database in this session.
setCustomHostname(String) - Static method in class org.apache.spark.util.Utils
Allow setting a custom hostname because, when running on Mesos, Spark needs to use the same hostname that Mesos reports to the master.
setDecayFactor(double) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Set the forgetfulness of the previous centroids.
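For example, a minimal sketch of configuring a StreamingKMeans model (all values are illustrative; training data would then be supplied from a DStream of Vectors via trainOn):

    import org.apache.spark.mllib.clustering.StreamingKMeans

    val model = new StreamingKMeans()
      .setK(3)
      .setDecayFactor(0.5)        // exponentially forget older batches
      .setRandomCenters(2, 0.0)   // 2-dimensional centers with zero initial weight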
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.KMeans
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.KMeans
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.LDA
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.LDA
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Binarizer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.Binarizer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.DCT
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.DCT
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.HashingTF
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.HashingTF
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.IDF
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.IDF
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.IDFModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.IndexToString
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.IndexToString
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Interaction
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.Interaction
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.NGram
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.NGram
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Normalizer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.PCA
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.PCA
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.PCAModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.RFormula
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.RFormula
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.StandardScaler
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
setDefault(Param<T>, T) - Method in interface org.apache.spark.ml.param.Params
Sets a default value for a param.
setDefault(Seq<ParamPair<?>>) - Method in interface org.apache.spark.ml.param.Params
Sets default values for a list of params.
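setDefault is protected and intended for authors of custom Params implementations. A minimal sketch, with a hypothetical params holder:
    import org.apache.spark.ml.param.{DoubleParam, ParamMap, Params}
    import org.apache.spark.ml.util.Identifiable

    // hypothetical class illustrating how a default is declared for a param
    class MyParams extends Params {
      override val uid: String = Identifiable.randomUID("myParams")
      val threshold = new DoubleParam(this, "threshold", "decision threshold")
      setDefault(threshold, 0.5)   // default used until the param is explicitly set
      override def copy(extra: ParamMap): MyParams = defaultCopy(extra)
    }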
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.Pipeline
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.Pipeline
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.PipelineModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.PipelineModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.recommendation.ALS
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.recommendation.ALS
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
setDefault(Param<T>, T) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
setDefault(Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
setDefaultClassLoader(ClassLoader) - Static method in class org.apache.spark.serializer.KryoSerializer
 
setDefaultClassLoader(ClassLoader) - Method in class org.apache.spark.serializer.Serializer
Sets a class loader for the serializer to use in deserialization.
setDegree(int) - Method in class org.apache.spark.ml.feature.PolynomialExpansion
 
setDeployMode(String) - Method in class org.apache.spark.launcher.SparkLauncher
Set the deploy mode for the application.
setDocConcentration(double[]) - Method in class org.apache.spark.ml.clustering.LDA
 
setDocConcentration(double) - Method in class org.apache.spark.ml.clustering.LDA
 
setDocConcentration(Vector) - Method in class org.apache.spark.mllib.clustering.LDA
Concentration parameter (commonly named "alpha") for the prior placed on documents' distributions over topics ("theta").
setDocConcentration(double) - Method in class org.apache.spark.mllib.clustering.LDA
Replicates a Double docConcentration to create a symmetric prior.
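A short sketch of setting a symmetric document-topic prior on the mllib LDA; the corpus RDD is an assumption:
    import org.apache.spark.mllib.clustering.LDA

    // `corpus` is assumed to be an RDD[(Long, Vector)] of (document id, term-count vector)
    val lda = new LDA()
      .setK(10)                  // number of topics
      .setDocConcentration(1.1)  // symmetric "alpha" prior over per-document topic distributions
      .setMaxIterations(50)
    val ldaModel = lda.run(corpus)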
setDropLast(boolean) - Method in class org.apache.spark.ml.feature.OneHotEncoder
 
setElasticNetParam(double) - Method in class org.apache.spark.ml.classification.LogisticRegression
Set the ElasticNet mixing parameter.
setElasticNetParam(double) - Method in class org.apache.spark.ml.regression.LinearRegression
Set the ElasticNet mixing parameter.
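For example, an elastic-net mix of 0.5, halfway between pure L2 (0.0) and pure L1 (1.0); the training DataFrame is an assumption:
    import org.apache.spark.ml.classification.LogisticRegression

    // `training` is assumed to be a DataFrame with "label" and "features" columns
    val lr = new LogisticRegression()
      .setMaxIter(100)
      .setRegParam(0.01)         // overall regularization strength
      .setElasticNetParam(0.5)   // 0.0 = L2 (ridge), 1.0 = L1 (lasso)
    val lrModel = lr.fit(training)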
setEpsilon(double) - Method in class org.apache.spark.mllib.clustering.KMeans
Set the distance threshold within which we consider centers to have converged.
setEstimator(Estimator<?>) - Method in class org.apache.spark.ml.tuning.CrossValidator
 
setEstimator(Estimator<?>) - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
setEstimatorParamMaps(ParamMap[]) - Method in class org.apache.spark.ml.tuning.CrossValidator
 
setEstimatorParamMaps(ParamMap[]) - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
setEvaluator(Evaluator) - Method in class org.apache.spark.ml.tuning.CrossValidator
 
setEvaluator(Evaluator) - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
setExecutorEnv(String, String) - Method in class org.apache.spark.SparkConf
Set an environment variable to be used when launching executors for this application.
setExecutorEnv(Seq<Tuple2<String, String>>) - Method in class org.apache.spark.SparkConf
Set multiple environment variables to be used when launching executors.
setExecutorEnv(Tuple2<String, String>[]) - Method in class org.apache.spark.SparkConf
Set multiple environment variables to be used when launching executors.
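A configuration sketch; the variable names and values are illustrative only:
    import org.apache.spark.SparkConf

    val conf = new SparkConf()
      .setAppName("my-app")
      .setExecutorEnv("JAVA_OPTS", "-XX:+UseG1GC")          // one variable
      .setExecutorEnv(Seq("ENV_A" -> "1", "ENV_B" -> "2"))   // several at once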
setFamily(String) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
Sets the value of param family.
setFeatureIndex(int) - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
setFeatureIndex(int) - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.classification.OneVsRest
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.clustering.KMeans
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.clustering.KMeansModel
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.clustering.LDA
The features for LDA should be a Vector representing the word counts in a document.
setFeaturesCol(String) - Method in class org.apache.spark.ml.clustering.LDAModel
The features for LDA should be a Vector representing the word counts in a document.
setFeaturesCol(String) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.feature.ChiSqSelector
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.feature.RFormula
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.PredictionModel
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.Predictor
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
setFeaturesCol(String) - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setFeaturesCol(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setFeatureSubsetStrategy(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setFeatureSubsetStrategy(String) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setFeatureSubsetStrategy(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setFeatureSubsetStrategy(String) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setFinalRDDStorageLevel(StorageLevel) - Method in class org.apache.spark.mllib.recommendation.ALS
:: DeveloperApi :: Sets storage level for final RDDs (user/product used in MatrixFactorizationModel).
setFinalStorageLevel(String) - Method in class org.apache.spark.ml.recommendation.ALS
 
setFitIntercept(boolean) - Method in class org.apache.spark.ml.classification.LogisticRegression
Whether to fit an intercept term.
setFitIntercept(boolean) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
Set whether we should fit the intercept. Default is true.
setFitIntercept(boolean) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
Sets if we should fit the intercept.
setFitIntercept(boolean) - Method in class org.apache.spark.ml.regression.LinearRegression
Set whether we should fit the intercept. Default is true.
setFormula(String) - Method in class org.apache.spark.ml.feature.RFormula
Sets the formula to use for this transformer.
setGaps(boolean) - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
setGenerics(Kryo, Class<?>[]) - Static method in class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
setGradient(Gradient) - Method in class org.apache.spark.mllib.optimization.GradientDescent
Set the gradient function (of the loss function of one single data example) to be used for SGD.
setGradient(Gradient) - Method in class org.apache.spark.mllib.optimization.LBFGS
Set the gradient function (of the loss function of one single data example) to be used for L-BFGS.
setHalfLife(double, String) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Set the half life and time unit ("batches" or "points").
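A sketch of configuring forgetfulness through a half-life; the input stream is an assumption:
    import org.apache.spark.mllib.clustering.StreamingKMeans

    val skm = new StreamingKMeans()
      .setK(3)
      .setHalfLife(5.0, "batches")      // old data loses half its weight every 5 batches
      .setRandomCenters(2, 0.0, 42L)    // dimension 2, zero initial weight, fixed seed
    // skm.trainOn(trainingStream)      // `trainingStream` would be a DStream[Vector]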
setHandleInvalid(String) - Method in class org.apache.spark.ml.feature.StringIndexer
 
setHandleInvalid(String) - Method in class org.apache.spark.ml.feature.StringIndexerModel
 
setHashAlgorithm(String) - Method in class org.apache.spark.mllib.feature.HashingTF
Set the hash algorithm used when mapping term to integer.
setIfMissing(String, String) - Method in class org.apache.spark.SparkConf
Set a parameter if it isn't already configured.
setImmutable(boolean) - Static method in class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
setImplicitPrefs(boolean) - Method in class org.apache.spark.ml.recommendation.ALS
 
setImplicitPrefs(boolean) - Method in class org.apache.spark.mllib.recommendation.ALS
Sets whether to use implicit preference.
setImpurity(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setImpurity(String) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setImpurity(String) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setImpurity(String) - Method in class org.apache.spark.ml.classification.GBTClassifier
The impurity setting is ignored for GBT models.
setImpurity(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setImpurity(String) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setImpurity(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setImpurity(String) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setImpurity(String) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setImpurity(String) - Method in class org.apache.spark.ml.regression.GBTRegressor
The impurity setting is ignored for GBT models.
setImpurity(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setImpurity(String) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setImpurity(Impurity) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setIndices(int[]) - Method in class org.apache.spark.ml.feature.VectorSlicer
 
setInitialCenters(Vector[], double[]) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Specify initial centers directly.
setInitializationMode(String) - Method in class org.apache.spark.mllib.clustering.KMeans
Set the initialization algorithm.
setInitializationMode(String) - Method in class org.apache.spark.mllib.clustering.PowerIterationClustering
Set the initialization mode.
setInitializationSteps(int) - Method in class org.apache.spark.mllib.clustering.KMeans
Set the number of steps for the k-means|| initialization mode.
setInitialModel(GaussianMixtureModel) - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Set the initial GMM starting point, bypassing the random initialization.
setInitialModel(KMeansModel) - Method in class org.apache.spark.mllib.clustering.KMeans
Set the initial starting point, bypassing the random initialization or k-means||. The condition model.k == this.k must be met; failure results in an IllegalArgumentException.
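A sketch of warm-starting k-means from an earlier model; the RDD and the previous model are assumptions:
    import org.apache.spark.mllib.clustering.KMeans

    // `previousModel` is an existing KMeansModel with 5 centers; `data` is an RDD[Vector]
    val kmeans = new KMeans()
      .setK(5)                        // must equal previousModel.k
      .setMaxIterations(20)
      .setInitialModel(previousModel) // skips random / k-means|| initialization
    val refined = kmeans.run(data)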
setInitialWeights(Vector) - Method in class org.apache.spark.mllib.classification.StreamingLogisticRegressionWithSGD
Set the initial weights.
setInitialWeights(Vector) - Method in class org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
Set the initial weights.
setInitMode(String) - Method in class org.apache.spark.ml.clustering.KMeans
 
setInitSteps(int) - Method in class org.apache.spark.ml.clustering.KMeans
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.Binarizer
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.Bucketizer
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.CountVectorizer
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
setInputCol(String) - Static method in class org.apache.spark.ml.feature.DCT
 
setInputCol(String) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.HashingTF
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.IDF
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.IDFModel
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.IndexToString
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.MaxAbsScaler
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.MinMaxScaler
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
setInputCol(String) - Static method in class org.apache.spark.ml.feature.NGram
 
setInputCol(String) - Static method in class org.apache.spark.ml.feature.Normalizer
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.OneHotEncoder
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.PCA
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.PCAModel
 
setInputCol(String) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
setInputCol(String) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.StandardScaler
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.StandardScalerModel
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.StringIndexer
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.StringIndexerModel
 
setInputCol(String) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.VectorIndexer
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.VectorIndexerModel
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.VectorSlicer
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.Word2Vec
 
setInputCol(String) - Method in class org.apache.spark.ml.feature.Word2VecModel
 
setInputCol(String) - Method in class org.apache.spark.ml.UnaryTransformer
 
setInputCols(String[]) - Method in class org.apache.spark.ml.feature.Interaction
 
setInputCols(String[]) - Method in class org.apache.spark.ml.feature.VectorAssembler
 
setIntercept(boolean) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
setIntercept(boolean) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
setIntercept(boolean) - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
Set if the algorithm should add an intercept.
setIntercept(boolean) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
setIntercept(boolean) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
setIntercept(boolean) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
setIntermediateRDDStorageLevel(StorageLevel) - Method in class org.apache.spark.mllib.recommendation.ALS
:: DeveloperApi :: Sets storage level for intermediate RDDs (user/product in/out links).
setIntermediateStorageLevel(String) - Method in class org.apache.spark.ml.recommendation.ALS
 
setInverse(boolean) - Method in class org.apache.spark.ml.feature.DCT
 
setIsotonic(boolean) - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
setIsotonic(boolean) - Method in class org.apache.spark.mllib.regression.IsotonicRegression
Sets the isotonic parameter.
setItemCol(String) - Method in class org.apache.spark.ml.recommendation.ALS
 
setItemCol(String) - Method in class org.apache.spark.ml.recommendation.ALSModel
 
setIterations(int) - Method in class org.apache.spark.mllib.recommendation.ALS
Set the number of iterations to run.
setJars(Seq<String>) - Method in class org.apache.spark.SparkConf
Set JAR files to distribute to the cluster.
setJars(String[]) - Method in class org.apache.spark.SparkConf
Set JAR files to distribute to the cluster.
setJavaHome(String) - Method in class org.apache.spark.launcher.SparkLauncher
Set a custom JAVA_HOME for launching the Spark application.
setJobDescription(String) - Method in class org.apache.spark.SparkContext
Set a human readable description of the current job.
setJobGroup(String, String, boolean) - Method in class org.apache.spark.api.java.JavaSparkContext
Assigns a group ID to all the jobs started by this thread until the group ID is set to a different value or cleared.
setJobGroup(String, String) - Method in class org.apache.spark.api.java.JavaSparkContext
Assigns a group ID to all the jobs started by this thread until the group ID is set to a different value or cleared.
setJobGroup(String, String, boolean) - Method in class org.apache.spark.SparkContext
Assigns a group ID to all the jobs started by this thread until the group ID is set to a different value or cleared.
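A usage sketch on an existing SparkContext `sc`; the group id and description are illustrative:
    // tag every job launched from this thread; the group can later be cancelled as a unit
    sc.setJobGroup("nightly-etl", "Nightly ETL jobs", interruptOnCancel = true)
    sc.parallelize(1 to 1000).count()
    // from another thread: sc.cancelJobGroup("nightly-etl")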
setK(int) - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
setK(int) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
setK(int) - Method in class org.apache.spark.ml.clustering.KMeans
 
setK(int) - Method in class org.apache.spark.ml.clustering.LDA
 
setK(int) - Method in class org.apache.spark.ml.feature.PCA
 
setK(int) - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Sets the desired number of leaf clusters (default: 4).
setK(int) - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Set the number of Gaussians in the mixture model.
setK(int) - Method in class org.apache.spark.mllib.clustering.KMeans
Set the number of clusters to create (k).
setK(int) - Method in class org.apache.spark.mllib.clustering.LDA
Set the number of topics to infer, i.e., the number of soft cluster centers.
setK(int) - Method in class org.apache.spark.mllib.clustering.PowerIterationClustering
Set the number of clusters.
setK(int) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Set the number of clusters.
setKappa(double) - Method in class org.apache.spark.mllib.clustering.OnlineLDAOptimizer
Learning rate: an exponential decay rate that should be in the interval (0.5, 1.0] to guarantee asymptotic convergence.
setKeepLastCheckpoint(boolean) - Method in class org.apache.spark.ml.clustering.LDA
 
setKeepLastCheckpoint(boolean) - Method in class org.apache.spark.mllib.clustering.EMLDAOptimizer
If using checkpointing, this indicates whether to keep the last checkpoint (vs clean up).
setKeyOrdering(Ordering<K>) - Method in class org.apache.spark.rdd.ShuffledRDD
Set key ordering for RDD's shuffle.
setLabelCol(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setLabelCol(String) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
setLabelCol(String) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
setLabelCol(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
setLabelCol(String) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
setLabelCol(String) - Method in class org.apache.spark.ml.classification.OneVsRest
 
setLabelCol(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setLabelCol(String) - Method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
setLabelCol(String) - Method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
setLabelCol(String) - Method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
setLabelCol(String) - Method in class org.apache.spark.ml.feature.ChiSqSelector
 
setLabelCol(String) - Method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
setLabelCol(String) - Method in class org.apache.spark.ml.feature.RFormula
 
setLabelCol(String) - Method in class org.apache.spark.ml.Predictor
 
setLabelCol(String) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
setLabelCol(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setLabelCol(String) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
setLabelCol(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
setLabelCol(String) - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
setLabelCol(String) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
setLabelCol(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setLabels(String[]) - Method in class org.apache.spark.ml.feature.IndexToString
 
setLambda(double) - Method in class org.apache.spark.mllib.classification.NaiveBayes
Set the smoothing parameter.
setLambda(double) - Method in class org.apache.spark.mllib.recommendation.ALS
Set the regularization parameter, lambda.
setLayers(int[]) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
setLearningDecay(double) - Method in class org.apache.spark.ml.clustering.LDA
 
setLearningOffset(double) - Method in class org.apache.spark.ml.clustering.LDA
 
setLearningRate(double) - Method in class org.apache.spark.mllib.feature.Word2Vec
Sets initial learning rate (default: 0.025).
setLearningRate(double) - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
setLink(String) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
Sets the value of param link.
setLinkPredictionCol(String) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
Sets the link prediction (linear predictor) column name.
setLinkPredictionCol(String) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
Sets the link prediction (linear predictor) column name.
setLocalProperty(String, String) - Method in class org.apache.spark.api.java.JavaSparkContext
Set a local property that affects jobs submitted from this thread, and all child threads, such as the Spark fair scheduler pool.
setLocalProperty(String, String) - Method in class org.apache.spark.SparkContext
Set a local property that affects jobs submitted from this thread, such as the Spark fair scheduler pool.
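For instance, routing this thread's jobs to a fair-scheduler pool; the pool name is illustrative:
    // only affects jobs submitted from the current thread
    sc.setLocalProperty("spark.scheduler.pool", "production")
    // ... run jobs ...
    sc.setLocalProperty("spark.scheduler.pool", null)   // setting null removes the property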
setLogLevel(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Control our logLevel.
setLogLevel(String) - Method in class org.apache.spark.SparkContext
Control our logLevel.
setLogLevel(Level) - Static method in class org.apache.spark.util.Utils
Configure a new log4j level.
setLoss(Loss) - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
setLossType(String) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setLossType(String) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setMainClass(String) - Method in class org.apache.spark.launcher.SparkLauncher
Sets the application class name for Java/Scala applications.
setMapSideCombine(boolean) - Method in class org.apache.spark.rdd.ShuffledRDD
Set mapSideCombine flag for RDD's shuffle.
setMaster(String) - Method in class org.apache.spark.launcher.SparkLauncher
Set the Spark master for the application.
setMaster(String) - Method in class org.apache.spark.SparkConf
The master URL to connect to, such as "local" to run locally with one thread, "local[4]" to run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster.
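A typical local-mode configuration sketch:
    import org.apache.spark.{SparkConf, SparkContext}

    val conf = new SparkConf()
      .setMaster("local[4]")   // 4 local worker threads; "spark://host:7077" targets a standalone cluster
      .setAppName("example")
    val sc = new SparkContext(conf)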
setMax(double) - Method in class org.apache.spark.ml.feature.MinMaxScaler
 
setMax(double) - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
setMaxBins(int) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setMaxBins(int) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setMaxBins(int) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setMaxBins(int) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setMaxBins(int) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setMaxBins(int) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setMaxBins(int) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setMaxBins(int) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setMaxBins(int) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setMaxBins(int) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setMaxBins(int) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setMaxBins(int) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setMaxBins(int) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setMaxCategories(int) - Method in class org.apache.spark.ml.feature.VectorIndexer
 
setMaxDepth(int) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setMaxDepth(int) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setMaxDepth(int) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setMaxDepth(int) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setMaxDepth(int) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setMaxDepth(int) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setMaxDepth(int) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setMaxDepth(int) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setMaxDepth(int) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setMaxDepth(int) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setMaxDepth(int) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setMaxDepth(int) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setMaxDepth(int) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setMaxIter(int) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setMaxIter(int) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setMaxIter(int) - Method in class org.apache.spark.ml.classification.LogisticRegression
Set the maximum number of iterations.
setMaxIter(int) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
Set the maximum number of iterations.
setMaxIter(int) - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
setMaxIter(int) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
setMaxIter(int) - Method in class org.apache.spark.ml.clustering.KMeans
 
setMaxIter(int) - Method in class org.apache.spark.ml.clustering.LDA
 
setMaxIter(int) - Method in class org.apache.spark.ml.feature.Word2Vec
 
setMaxIter(int) - Method in class org.apache.spark.ml.recommendation.ALS
 
setMaxIter(int) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
Set the maximum number of iterations.
setMaxIter(int) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setMaxIter(int) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setMaxIter(int) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
Sets the maximum number of iterations (applicable for solver "irls").
setMaxIter(int) - Method in class org.apache.spark.ml.regression.LinearRegression
Set the maximum number of iterations.
setMaxIterations(int) - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Sets the max number of k-means iterations to split clusters (default: 20).
setMaxIterations(int) - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Set the maximum number of iterations allowed.
setMaxIterations(int) - Method in class org.apache.spark.mllib.clustering.KMeans
Set maximum number of iterations allowed.
setMaxIterations(int) - Method in class org.apache.spark.mllib.clustering.LDA
Set the maximum number of iterations allowed.
setMaxIterations(int) - Method in class org.apache.spark.mllib.clustering.PowerIterationClustering
Set the maximum number of iterations of the power iteration loop.
setMaxLocalProjDBSize(long) - Method in class org.apache.spark.mllib.fpm.PrefixSpan
Sets the maximum number of items (including delimiters used in the internal storage format) allowed in a projected database before local processing (default: 32000000L).
setMaxMemoryInMB(int) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setMaxMemoryInMB(int) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setMaxMemoryInMB(int) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setMaxMemoryInMB(int) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setMaxMemoryInMB(int) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setMaxMemoryInMB(int) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setMaxMemoryInMB(int) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setMaxMemoryInMB(int) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setMaxMemoryInMB(int) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setMaxMemoryInMB(int) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setMaxMemoryInMB(int) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setMaxMemoryInMB(int) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setMaxMemoryInMB(int) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setMaxPatternLength(int) - Method in class org.apache.spark.mllib.fpm.PrefixSpan
Sets maximal pattern length (default: 10).
setMaxSentenceLength(int) - Method in class org.apache.spark.mllib.feature.Word2Vec
Sets the maximum length (in words) of each sentence in the input data.
setMetricName(String) - Method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
setMetricName(String) - Method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
setMetricName(String) - Method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
setMin(double) - Method in class org.apache.spark.ml.feature.MinMaxScaler
 
setMin(double) - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
setMinConfidence(double) - Method in class org.apache.spark.mllib.fpm.AssociationRules
Sets the minimal confidence (default: 0.8).
setMinCount(int) - Method in class org.apache.spark.ml.feature.Word2Vec
 
setMinCount(int) - Method in class org.apache.spark.mllib.feature.Word2Vec
Sets minCount, the minimum number of times a token must appear to be included in the word2vec model's vocabulary (default: 5).
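A minimal fitting sketch for the mllib Word2Vec; the tokenized corpus is an assumption:
    import org.apache.spark.mllib.feature.Word2Vec

    // `tokens` is assumed to be an RDD[Seq[String]] of tokenized sentences
    val w2v = new Word2Vec()
      .setVectorSize(100)
      .setMinCount(5)          // drop words that appear fewer than 5 times
      .setLearningRate(0.025)
    val w2vModel = w2v.fit(tokens)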
setMinDF(double) - Method in class org.apache.spark.ml.feature.CountVectorizer
 
setMinDivisibleClusterSize(double) - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
setMinDivisibleClusterSize(double) - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Sets the minimum number of points (if >= 1.0) or the minimum proportion of points (if < 1.0) of a divisible cluster (default: 1).
setMinDocFreq(int) - Method in class org.apache.spark.ml.feature.IDF
 
setMiniBatchFraction(double) - Method in class org.apache.spark.mllib.classification.StreamingLogisticRegressionWithSGD
Set the fraction of each batch to use for updates.
setMiniBatchFraction(double) - Method in class org.apache.spark.mllib.clustering.OnlineLDAOptimizer
Mini-batch fraction in (0, 1], which sets the fraction of documents sampled and used in each iteration.
setMiniBatchFraction(double) - Method in class org.apache.spark.mllib.optimization.GradientDescent
:: Experimental :: Set fraction of data to be used for each SGD iteration.
setMiniBatchFraction(double) - Method in class org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
Set the fraction of each batch to use for updates.
setMinInfoGain(double) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setMinInfoGain(double) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setMinInfoGain(double) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setMinInfoGain(double) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setMinInfoGain(double) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setMinInfoGain(double) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setMinInfoGain(double) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setMinInfoGain(double) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setMinInfoGain(double) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setMinInfoGain(double) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setMinInfoGain(double) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setMinInfoGain(double) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setMinInfoGain(double) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setMinInstancesPerNode(int) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setMinInstancesPerNode(int) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setMinInstancesPerNode(int) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setMinInstancesPerNode(int) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setMinInstancesPerNode(int) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setMinInstancesPerNode(int) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setMinInstancesPerNode(int) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setMinInstancesPerNode(int) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setMinInstancesPerNode(int) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setMinInstancesPerNode(int) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setMinInstancesPerNode(int) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setMinInstancesPerNode(int) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setMinInstancesPerNode(int) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setMinSupport(double) - Method in class org.apache.spark.mllib.fpm.FPGrowth
Sets the minimal support level (default: 0.3).
setMinSupport(double) - Method in class org.apache.spark.mllib.fpm.PrefixSpan
Sets the minimal support level (default: 0.1).
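A sketch of mining frequent itemsets with FP-growth; the transactions RDD is an assumption:
    import org.apache.spark.mllib.fpm.FPGrowth

    // `transactions` is assumed to be an RDD[Array[String]] of item baskets
    val fpg = new FPGrowth()
      .setMinSupport(0.2)      // keep itemsets present in at least 20% of transactions
      .setNumPartitions(10)
    val fpModel = fpg.run(transactions)
    fpModel.freqItemsets.collect().foreach(println)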
setMinTF(double) - Method in class org.apache.spark.ml.feature.CountVectorizer
 
setMinTF(double) - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
setMinTokenLength(int) - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
setModelType(String) - Method in class org.apache.spark.ml.classification.NaiveBayes
Set the model type using a string (case-sensitive).
setModelType(String) - Method in class org.apache.spark.mllib.classification.NaiveBayes
Set the model type using a string (case-sensitive).
setN(int) - Method in class org.apache.spark.ml.feature.NGram
 
setName(String) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Assign a name to this RDD
setName(String) - Method in class org.apache.spark.api.java.JavaPairRDD
Assign a name to this RDD
setName(String) - Method in class org.apache.spark.api.java.JavaRDD
Assign a name to this RDD
setName(String) - Static method in class org.apache.spark.api.r.RRDD
 
setName(String) - Static method in class org.apache.spark.graphx.EdgeRDD
 
setName(String) - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
setName(String) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
setName(String) - Static method in class org.apache.spark.graphx.VertexRDD
 
setName(String) - Static method in class org.apache.spark.rdd.HadoopRDD
 
setName(String) - Static method in class org.apache.spark.rdd.JdbcRDD
 
setName(String) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
setName(String) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
setName(String) - Method in class org.apache.spark.rdd.RDD
Assign a name to this RDD
setNames(String[]) - Method in class org.apache.spark.ml.feature.VectorSlicer
 
setNonnegative(boolean) - Method in class org.apache.spark.ml.recommendation.ALS
 
setNonnegative(boolean) - Method in class org.apache.spark.mllib.recommendation.ALS
Set whether the least-squares problems solved at each iteration should have nonnegativity constraints.
setNumBlocks(int) - Method in class org.apache.spark.ml.recommendation.ALS
Sets both numUserBlocks and numItemBlocks to the specified value.
setNumBuckets(int) - Method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
setNumClasses(int) - Method in class org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
Set the number of possible outcomes for a k-class classification problem in multinomial logistic regression.
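A multinomial training sketch; the LabeledPoint RDD is an assumption:
    import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS

    // `labeled` is assumed to be an RDD[LabeledPoint] with labels in 0 to 9
    val lrLbfgsModel = new LogisticRegressionWithLBFGS()
      .setNumClasses(10)       // multinomial logistic regression with 10 possible outcomes
      .run(labeled)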
setNumClasses(int) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setNumCorrections(int) - Method in class org.apache.spark.mllib.optimization.LBFGS
Set the number of corrections used in the LBFGS update.
setNumFeatures(int) - Method in class org.apache.spark.ml.feature.HashingTF
 
setNumFolds(int) - Method in class org.apache.spark.ml.tuning.CrossValidator
 
setNumItemBlocks(int) - Method in class org.apache.spark.ml.recommendation.ALS
 
setNumIterations(int) - Method in class org.apache.spark.mllib.classification.StreamingLogisticRegressionWithSGD
Set the number of iterations of gradient descent to run per update.
setNumIterations(int) - Method in class org.apache.spark.mllib.feature.Word2Vec
Sets the number of iterations (default: 1), which should be smaller than or equal to the number of partitions.
setNumIterations(int) - Method in class org.apache.spark.mllib.optimization.GradientDescent
Set the number of iterations for SGD.
setNumIterations(int) - Method in class org.apache.spark.mllib.optimization.LBFGS
Set the maximal number of iterations for L-BFGS.
setNumIterations(int) - Method in class org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
Set the number of iterations of gradient descent to run per update.
setNumIterations(int) - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
setNumPartitions(int) - Method in class org.apache.spark.ml.feature.Word2Vec
 
setNumPartitions(int) - Method in class org.apache.spark.mllib.feature.Word2Vec
Sets number of partitions (default: 1).
setNumPartitions(int) - Method in class org.apache.spark.mllib.fpm.FPGrowth
Sets the number of partitions used by parallel FP-growth (default: same as input data).
setNumTopFeatures(int) - Method in class org.apache.spark.ml.feature.ChiSqSelector
 
setNumTrees(int) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setNumTrees(int) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setNumUserBlocks(int) - Method in class org.apache.spark.ml.recommendation.ALS
 
setOptimizeDocConcentration(boolean) - Method in class org.apache.spark.ml.clustering.LDA
 
setOptimizeDocConcentration(boolean) - Method in class org.apache.spark.mllib.clustering.OnlineLDAOptimizer
Sets whether to optimize docConcentration parameter during training.
setOptimizer(String) - Method in class org.apache.spark.ml.clustering.LDA
 
setOptimizer(LDAOptimizer) - Method in class org.apache.spark.mllib.clustering.LDA
:: DeveloperApi ::
setOptimizer(String) - Method in class org.apache.spark.mllib.clustering.LDA
Set the LDAOptimizer used to perform the actual calculation by algorithm name.
setOrNull(long, int, int) - Method in class org.apache.spark.sql.types.Decimal
Set this Decimal to the given unscaled Long, with a given precision and scale, and return it, or return null if it cannot be set due to overflow.
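For illustration, a small sketch of the overflow behavior (values chosen only for this example):

    import org.apache.spark.sql.types.Decimal;

    Decimal d = new Decimal();
    Decimal ok  = d.setOrNull(12345L, 7, 2);  // 123.45: 5 digits fit within precision 7
    Decimal bad = d.setOrNull(12345L, 3, 2);  // needs 5 digits > precision 3, so returns null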
setOutputCol(String) - Method in class org.apache.spark.ml.feature.Binarizer
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.Bucketizer
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.ChiSqSelector
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.CountVectorizer
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
setOutputCol(String) - Static method in class org.apache.spark.ml.feature.DCT
 
setOutputCol(String) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.HashingTF
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.IDF
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.IDFModel
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.IndexToString
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.Interaction
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.MaxAbsScaler
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.MinMaxScaler
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
setOutputCol(String) - Static method in class org.apache.spark.ml.feature.NGram
 
setOutputCol(String) - Static method in class org.apache.spark.ml.feature.Normalizer
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.OneHotEncoder
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.PCA
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.PCAModel
 
setOutputCol(String) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
setOutputCol(String) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.StandardScaler
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.StandardScalerModel
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.StringIndexer
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.StringIndexerModel
 
setOutputCol(String) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.VectorAssembler
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.VectorIndexer
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.VectorIndexerModel
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.VectorSlicer
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.Word2Vec
 
setOutputCol(String) - Method in class org.apache.spark.ml.feature.Word2VecModel
 
setOutputCol(String) - Method in class org.apache.spark.ml.UnaryTransformer
 
setOutputStream(OutputStream) - Method in class org.apache.spark.storage.memory.RedirectableOutputStream
 
setP(double) - Method in class org.apache.spark.ml.feature.Normalizer
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.Bucketizer
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.IDFModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.PCAModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
setParent(Estimator<M>) - Method in class org.apache.spark.ml.Model
Sets the parent of this model (Java API).
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.PipelineModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
setParent(Estimator<M>) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
setPattern(String) - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
setPeacePeriod(int) - Method in class org.apache.spark.mllib.stat.test.StreamingTest
Set the number of initial batches to ignore.
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
setPredictionCol(String) - Method in class org.apache.spark.ml.classification.OneVsRest
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setPredictionCol(String) - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
setPredictionCol(String) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
setPredictionCol(String) - Method in class org.apache.spark.ml.clustering.KMeans
 
setPredictionCol(String) - Method in class org.apache.spark.ml.clustering.KMeansModel
 
setPredictionCol(String) - Method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
setPredictionCol(String) - Method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
setPredictionCol(String) - Method in class org.apache.spark.ml.PredictionModel
 
setPredictionCol(String) - Method in class org.apache.spark.ml.Predictor
 
setPredictionCol(String) - Method in class org.apache.spark.ml.recommendation.ALS
 
setPredictionCol(String) - Method in class org.apache.spark.ml.recommendation.ALSModel
 
setPredictionCol(String) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
setPredictionCol(String) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
setPredictionCol(String) - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
setPredictionCol(String) - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setPredictionCol(String) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setProbabilityCol(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setProbabilityCol(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setProbabilityCol(String) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
setProbabilityCol(String) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
setProbabilityCol(String) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
setProbabilityCol(String) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
setProbabilityCol(String) - Method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
setProbabilityCol(String) - Method in class org.apache.spark.ml.classification.ProbabilisticClassifier
 
setProbabilityCol(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setProbabilityCol(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setProbabilityCol(String) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
setProductBlocks(int) - Method in class org.apache.spark.mllib.recommendation.ALS
Set the number of product blocks to parallelize the computation.
setPropertiesFile(String) - Method in class org.apache.spark.launcher.SparkLauncher
Set a custom properties file with Spark configuration for the application.
setQuantileCalculationStrategy(Enumeration.Value) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setQuantileProbabilities(double[]) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
setQuantileProbabilities(double[]) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
setQuantilesCol(String) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
setQuantilesCol(String) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
setRandomCenters(int, double, long) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Initialize random centers, requiring only the number of dimensions.
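A hedged sketch, assuming `trainingStream` is an existing JavaDStream<Vector> of 2-dimensional points:

    import org.apache.spark.mllib.clustering.StreamingKMeans;

    StreamingKMeans skm = new StreamingKMeans()
        .setK(3)
        .setDecayFactor(1.0)
        .setRandomCenters(2, 0.1, 42L);  // 2 dimensions, initial weight 0.1, seed 42
    skm.trainOn(trainingStream);         // trainingStream: JavaDStream<Vector> (assumed)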
setRank(int) - Method in class org.apache.spark.ml.recommendation.ALS
 
setRank(int) - Method in class org.apache.spark.mllib.recommendation.ALS
Set the rank of the feature matrices computed (number of features).
setRatingCol(String) - Method in class org.apache.spark.ml.recommendation.ALS
 
setRawPredictionCol(String) - Method in class org.apache.spark.ml.classification.ClassificationModel
 
setRawPredictionCol(String) - Method in class org.apache.spark.ml.classification.Classifier
 
setRawPredictionCol(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setRawPredictionCol(String) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setRawPredictionCol(String) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
setRawPredictionCol(String) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
setRawPredictionCol(String) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
setRawPredictionCol(String) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
setRawPredictionCol(String) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
setRawPredictionCol(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setRawPredictionCol(String) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setRawPredictionCol(String) - Method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
setRegParam(double) - Method in class org.apache.spark.ml.classification.LogisticRegression
Set the regularization parameter.
setRegParam(double) - Method in class org.apache.spark.ml.recommendation.ALS
 
setRegParam(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
Sets the regularization parameter for L2 regularization.
setRegParam(double) - Method in class org.apache.spark.ml.regression.LinearRegression
Set the regularization parameter.
setRegParam(double) - Method in class org.apache.spark.mllib.classification.StreamingLogisticRegressionWithSGD
Set the regularization parameter.
setRegParam(double) - Method in class org.apache.spark.mllib.optimization.GradientDescent
Set the regularization parameter.
setRegParam(double) - Method in class org.apache.spark.mllib.optimization.LBFGS
Set the regularization parameter.
setRegParam(double) - Method in class org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
Set the regularization parameter.
setRelativeError(double) - Method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
setRequiredColumns(Configuration, StructType, StructType) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
setRest(long, int, VD, ED) - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
setRuns(int) - Method in class org.apache.spark.mllib.clustering.KMeans
This function has no effect since Spark 2.0.0.
setSample(RDD<Object>) - Method in class org.apache.spark.mllib.stat.KernelDensity
Sets the sample to use for density estimation.
setSample(JavaRDD<Double>) - Method in class org.apache.spark.mllib.stat.KernelDensity
Sets the sample to use for density estimation (for Java users).
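For illustration, a minimal Java sketch assuming an existing JavaSparkContext `jsc`:

    import java.util.Arrays;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.mllib.stat.KernelDensity;

    JavaRDD<Double> sample = jsc.parallelize(Arrays.asList(1.0, 2.0, 2.5, 4.0));
    double[] densities = new KernelDensity()
        .setSample(sample)
        .setBandwidth(0.5)                   // Gaussian kernel bandwidth
        .estimate(new double[]{2.0, 3.0});   // evaluation points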
setScalingVec(Vector) - Method in class org.apache.spark.ml.feature.ElementwiseProduct
 
setSeed(long) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setSeed(long) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setSeed(long) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setSeed(long) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setSeed(long) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
Set the seed for weights initialization if weights are not set.
setSeed(long) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setSeed(long) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setSeed(long) - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
setSeed(long) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
setSeed(long) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
setSeed(long) - Method in class org.apache.spark.ml.clustering.KMeans
 
setSeed(long) - Method in class org.apache.spark.ml.clustering.LDA
 
setSeed(long) - Method in class org.apache.spark.ml.clustering.LDAModel
 
setSeed(long) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
setSeed(long) - Method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
setSeed(long) - Method in class org.apache.spark.ml.feature.Word2Vec
 
setSeed(long) - Method in class org.apache.spark.ml.recommendation.ALS
 
setSeed(long) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setSeed(long) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setSeed(long) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setSeed(long) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setSeed(long) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setSeed(long) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setSeed(long) - Method in class org.apache.spark.ml.tuning.CrossValidator
 
setSeed(long) - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
setSeed(long) - Method in class org.apache.spark.mllib.clustering.BisectingKMeans
Sets the random seed (default: hash value of the class name).
setSeed(long) - Method in class org.apache.spark.mllib.clustering.GaussianMixture
Set the random seed.
setSeed(long) - Method in class org.apache.spark.mllib.clustering.KMeans
Set the random seed for cluster initialization.
setSeed(long) - Method in class org.apache.spark.mllib.clustering.LDA
Set the random seed for cluster initialization.
setSeed(long) - Method in class org.apache.spark.mllib.feature.Word2Vec
Sets random seed (default: a random long integer).
setSeed(long) - Method in class org.apache.spark.mllib.random.ExponentialGenerator
 
setSeed(long) - Method in class org.apache.spark.mllib.random.GammaGenerator
 
setSeed(long) - Method in class org.apache.spark.mllib.random.LogNormalGenerator
 
setSeed(long) - Method in class org.apache.spark.mllib.random.PoissonGenerator
 
setSeed(long) - Method in class org.apache.spark.mllib.random.StandardNormalGenerator
 
setSeed(long) - Method in class org.apache.spark.mllib.random.UniformGenerator
 
setSeed(long) - Method in class org.apache.spark.mllib.random.WeibullGenerator
 
setSeed(long) - Method in class org.apache.spark.mllib.recommendation.ALS
Sets a random seed to have deterministic results.
setSeed(long) - Method in class org.apache.spark.util.random.BernoulliCellSampler
 
setSeed(long) - Method in class org.apache.spark.util.random.BernoulliSampler
 
setSeed(long) - Method in class org.apache.spark.util.random.PoissonSampler
 
setSeed(long) - Method in interface org.apache.spark.util.random.Pseudorandom
Set random seed.
setSerializer(Serializer) - Method in class org.apache.spark.rdd.CoGroupedRDD
Set a serializer for this RDD's shuffle, or null to use the default (spark.serializer).
setSerializer(Serializer) - Method in class org.apache.spark.rdd.ShuffledRDD
Set a serializer for this RDD's shuffle, or null to use the default (spark.serializer).
setSmoothing(double) - Method in class org.apache.spark.ml.classification.NaiveBayes
Set the smoothing parameter.
setSolver(String) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
Sets the solver algorithm used for optimization.
setSolver(String) - Method in class org.apache.spark.ml.regression.LinearRegression
Set the solver algorithm used for optimization.
setSparkHome(String) - Method in class org.apache.spark.launcher.SparkLauncher
Set a custom Spark installation location for the application.
setSparkHome(String) - Method in class org.apache.spark.SparkConf
Set the location where Spark is installed on worker nodes.
setSplits(double[]) - Method in class org.apache.spark.ml.feature.Bucketizer
 
setSrcOnly(long, int, VD) - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
setStackTrace(StackTraceElement[]) - Static method in exception org.apache.spark.sql.AnalysisException
 
setStackTrace(StackTraceElement[]) - Static method in exception org.apache.spark.sql.ContinuousQueryException
 
setStages(PipelineStage[]) - Method in class org.apache.spark.ml.Pipeline
 
setStandardization(boolean) - Method in class org.apache.spark.ml.classification.LogisticRegression
Whether to standardize the training features before fitting the model.
setStandardization(boolean) - Method in class org.apache.spark.ml.regression.LinearRegression
Whether to standardize the training features before fitting the model.
setStatement(String) - Method in class org.apache.spark.ml.feature.SQLTransformer
 
setStepSize(double) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setStepSize(double) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setStepSize(double) - Method in class org.apache.spark.ml.feature.Word2Vec
 
setStepSize(double) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setStepSize(double) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setStepSize(double) - Method in class org.apache.spark.mllib.classification.StreamingLogisticRegressionWithSGD
Set the step size for gradient descent.
setStepSize(double) - Method in class org.apache.spark.mllib.optimization.GradientDescent
Set the initial step size of SGD for the first step.
setStepSize(double) - Method in class org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
Set the step size for gradient descent.
setStopWords(String[]) - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
setSubsamplingRate(double) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
setSubsamplingRate(double) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
setSubsamplingRate(double) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setSubsamplingRate(double) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setSubsamplingRate(double) - Method in class org.apache.spark.ml.clustering.LDA
 
setSubsamplingRate(double) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
setSubsamplingRate(double) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
setSubsamplingRate(double) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
setSubsamplingRate(double) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
setSubsamplingRate(double) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setTaskContext(TaskContext) - Static method in class org.apache.spark.TaskContext
Set the thread local TaskContext.
setTau0(double) - Method in class org.apache.spark.mllib.clustering.OnlineLDAOptimizer
A (positive) learning parameter that downweights early iterations.
setTestMethod(String) - Method in class org.apache.spark.mllib.stat.test.StreamingTest
Set the statistical method used for significance testing.
setThreshold(double) - Method in class org.apache.spark.ml.classification.LogisticRegression
 
setThreshold(double) - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
setThreshold(double) - Method in class org.apache.spark.ml.feature.Binarizer
 
setThreshold(double) - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
Sets the threshold that separates positive predictions from negative predictions in Binary Logistic Regression.
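A hedged sketch, assuming `model` is a trained mllib LogisticRegressionModel and `features` is a Vector to score:

    model.setThreshold(0.7);                 // predict 1.0 only when P(label = 1) >= 0.7
    double label = model.predict(features);  // 0.0 or 1.0 under the threshold above
    // model.clearThreshold() would make predict() return the raw probability instead.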
setThreshold(double) - Method in class org.apache.spark.mllib.classification.SVMModel
Sets the threshold that separates positive predictions from negative predictions.
setThresholds(double[]) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
setThresholds(double[]) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
setThresholds(double[]) - Method in class org.apache.spark.ml.classification.LogisticRegression
 
setThresholds(double[]) - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
setThresholds(double[]) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
setThresholds(double[]) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
setThresholds(double[]) - Method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
setThresholds(double[]) - Method in class org.apache.spark.ml.classification.ProbabilisticClassifier
 
setThresholds(double[]) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
setThresholds(double[]) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
setTol(double) - Method in class org.apache.spark.ml.classification.LogisticRegression
Set the convergence tolerance of iterations.
setTol(double) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
Set the convergence tolerance of iterations.
setTol(double) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
setTol(double) - Method in class org.apache.spark.ml.clustering.KMeans
 
setTol(double) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
Set the convergence tolerance of iterations.
setTol(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
Sets the convergence tolerance of iterations.
setTol(double) - Method in class org.apache.spark.ml.regression.LinearRegression
Set the convergence tolerance of iterations.
setToLowercase(boolean) - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
setTopicConcentration(double) - Method in class org.apache.spark.ml.clustering.LDA
 
setTopicConcentration(double) - Method in class org.apache.spark.mllib.clustering.LDA
Concentration parameter (commonly named "beta" or "eta") for the prior placed on topics' distributions over terms.
setTopicDistributionCol(String) - Method in class org.apache.spark.ml.clustering.LDA
 
setTrainRatio(double) - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
setTreeStrategy(Strategy) - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
setupContainerBuilderDockerInfo(String, SparkConf, Protos.ContainerInfo.Builder) - Static method in class org.apache.spark.scheduler.cluster.mesos.MesosSchedulerBackendUtil
Set up a Docker containerizer.
setUpdater(Updater) - Method in class org.apache.spark.mllib.optimization.GradientDescent
Set the updater function to actually perform a gradient step in a given direction.
setUpdater(Updater) - Method in class org.apache.spark.mllib.optimization.LBFGS
Set the updater function to actually perform a gradient step in a given direction.
setupGroups(int, DefaultPartitionCoalescer.PartitionLocations) - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
Initializes targetLen partition groups.
setupSecureURLConnection(URLConnection, org.apache.spark.SecurityManager) - Static method in class org.apache.spark.util.Utils
If the given URL connection is HttpsURLConnection, it sets the SSL socket factory and the host verifier from the given security manager.
setUseNodeIdCache(boolean) - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
setUserBlocks(int) - Method in class org.apache.spark.mllib.recommendation.ALS
Set the number of user blocks to parallelize the computation.
setUserCol(String) - Method in class org.apache.spark.ml.recommendation.ALS
 
setUserCol(String) - Method in class org.apache.spark.ml.recommendation.ALSModel
 
setValidateData(boolean) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
setValidateData(boolean) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
setValidateData(boolean) - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
Set if the algorithm should validate data before training.
setValidateData(boolean) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
setValidateData(boolean) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
setValidateData(boolean) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
setValidationTol(double) - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
setValue(R) - Method in class org.apache.spark.Accumulable
Deprecated.
Set the accumulator's value.
setValue(R) - Static method in class org.apache.spark.Accumulator
Deprecated.
 
setVarianceCol(String) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
setVarianceCol(String) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
setVectorSize(int) - Method in class org.apache.spark.ml.feature.Word2Vec
 
setVectorSize(int) - Method in class org.apache.spark.mllib.feature.Word2Vec
Sets vector size (default: 100).
setVerbose(boolean) - Method in class org.apache.spark.launcher.SparkLauncher
Enables verbose reporting for SparkSubmit.
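For illustration, a sketch of a launcher configuration; all paths and the main class are placeholders:

    import org.apache.spark.launcher.SparkLauncher;

    Process spark = new SparkLauncher()
        .setSparkHome("/opt/spark")                   // placeholder install location
        .setPropertiesFile("/etc/spark/custom.conf")  // placeholder properties file
        .setAppResource("/jobs/my-app.jar")           // placeholder application jar
        .setMainClass("com.example.MyApp")            // placeholder main class
        .setVerbose(true)
        .launch();                                    // throws IOException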
setVocabSize(int) - Method in class org.apache.spark.ml.feature.CountVectorizer
 
setWeightCol(String) - Method in class org.apache.spark.ml.classification.LogisticRegression
Whether to over-/under-sample training instances according to the given weights in weightCol.
setWeightCol(String) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
Sets the value of param weightCol.
setWeightCol(String) - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
setWeightCol(String) - Method in class org.apache.spark.ml.regression.LinearRegression
Whether to over-/under-sample training instances according to the given weights in weightCol.
setWeights(Vector) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
Sets the model weights.
setWindowSize(int) - Method in class org.apache.spark.ml.feature.Word2Vec
 
setWindowSize(int) - Method in class org.apache.spark.mllib.feature.Word2Vec
Sets the window of words (default: 5).
setWindowSize(int) - Method in class org.apache.spark.mllib.stat.test.StreamingTest
Set the number of batches to compute significance tests over.
setWithMean(boolean) - Method in class org.apache.spark.ml.feature.StandardScaler
 
setWithMean(boolean) - Method in class org.apache.spark.mllib.feature.StandardScalerModel
 
setWithStd(boolean) - Method in class org.apache.spark.ml.feature.StandardScaler
 
setWithStd(boolean) - Method in class org.apache.spark.mllib.feature.StandardScalerModel
 
setWrappedContext(SQLContext) - Method in class org.apache.spark.sql.SparkSession
 
sha1(Column) - Static method in class org.apache.spark.sql.functions
Calculates the SHA-1 digest of a binary column and returns the value as a 40 character hex string.
sha2(Column, int) - Static method in class org.apache.spark.sql.functions
Calculates the SHA-2 family of hash functions of a binary column and returns the value as a hex string.
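A minimal sketch of both hash functions, assuming a Dataset<Row> `df` with a string or binary column named "payload":

    import static org.apache.spark.sql.functions.*;

    df.select(sha1(col("payload")).alias("sha1"),
              sha2(col("payload"), 256).alias("sha256")).show();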
shape() - Method in class org.apache.spark.mllib.random.GammaGenerator
 
SharedParamsCodeGen - Class in org.apache.spark.ml.param.shared
Code generator for shared params (sharedParams.scala).
SharedParamsCodeGen() - Constructor for class org.apache.spark.ml.param.shared.SharedParamsCodeGen
 
sharedState() - Method in class org.apache.spark.sql.hive.HiveContext
Deprecated.
 
sharedState() - Method in class org.apache.spark.sql.SparkSession
State shared across sessions, including the SparkContext, cached data, listener, and a catalog that interacts with external systems.
sharedState() - Method in class org.apache.spark.sql.SQLContext
 
shiftLeft(Column, int) - Static method in class org.apache.spark.sql.functions
Shift the given value numBits left.
shiftRight(Column, int) - Static method in class org.apache.spark.sql.functions
Shift the given value numBits right.
shiftRightUnsigned(Column, int) - Static method in class org.apache.spark.sql.functions
Unsigned shift the given value numBits right.
SHORT() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable short type.
ShortestPaths - Class in org.apache.spark.graphx.lib
Computes shortest paths to the given set of landmark vertices, returning a graph where each vertex attribute is a map containing the shortest-path distance to each reachable landmark.
ShortestPaths() - Constructor for class org.apache.spark.graphx.lib.ShortestPaths
 
shortName() - Method in class org.apache.spark.ml.source.libsvm.DefaultSource
 
shortName() - Method in interface org.apache.spark.sql.sources.DataSourceRegister
The string that represents the format that this data source provider uses.
shortTimeUnitString(TimeUnit) - Static method in class org.apache.spark.streaming.ui.UIUtils
Return the short string for a TimeUnit.
ShortType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the ShortType object.
ShortType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing Short values.
shouldCloseFileAfterWrite(SparkConf, boolean) - Static method in class org.apache.spark.streaming.util.WriteAheadLogUtils
 
shouldDistributeGaussians(int, int) - Static method in class org.apache.spark.mllib.clustering.GaussianMixture
Heuristic for deciding whether to distribute the computation of the MultivariateGaussians, roughly when d > 25, except when k is very small.
shouldGoLeft(Vector) - Method in interface org.apache.spark.ml.tree.Split
Return true (split to left) or false (split to right).
shouldGoLeft(int, Split[]) - Method in interface org.apache.spark.ml.tree.Split
Return true (split to left) or false (split to right).
shouldOverwrite() - Method in class org.apache.spark.ml.util.MLWriter
 
shouldOwn(Param<?>) - Method in interface org.apache.spark.ml.param.Params
Validates that the input param belongs to this instance.
show(int) - Method in class org.apache.spark.sql.Dataset
Displays the Dataset in a tabular form.
show() - Method in class org.apache.spark.sql.Dataset
Displays the top 20 rows of Dataset in a tabular form.
show(boolean) - Method in class org.apache.spark.sql.Dataset
Displays the top 20 rows of Dataset in a tabular form.
show(int, boolean) - Method in class org.apache.spark.sql.Dataset
Displays the Dataset in a tabular form.
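For illustration, the four overloads side by side, assuming an existing Dataset<Row> `df`:

    df.show();           // top 20 rows, long values truncated
    df.show(5);          // top 5 rows
    df.show(false);      // top 20 rows, no truncation
    df.show(5, false);   // top 5 rows, no truncation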
showBytesDistribution(String, Function2<TaskInfo, TaskMetrics, Object>, Seq<Tuple2<TaskInfo, TaskMetrics>>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
showBytesDistribution(String, Option<org.apache.spark.util.Distribution>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
showBytesDistribution(String, org.apache.spark.util.Distribution) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
showDagVizForJob(int, Seq<org.apache.spark.ui.scope.RDDOperationGraph>) - Static method in class org.apache.spark.ui.UIUtils
Return a "DAG visualization" DOM element that expands into a visualization for a job.
showDagVizForStage(int, Option<org.apache.spark.ui.scope.RDDOperationGraph>) - Static method in class org.apache.spark.ui.UIUtils
Return a "DAG visualization" DOM element that expands into a visualization for a stage.
showDistribution(String, org.apache.spark.util.Distribution, Function1<Object, String>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
showDistribution(String, Option<org.apache.spark.util.Distribution>, Function1<Object, String>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
showDistribution(String, Option<org.apache.spark.util.Distribution>, String) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
showDistribution(String, String, Function2<TaskInfo, TaskMetrics, Object>, Seq<Tuple2<TaskInfo, TaskMetrics>>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
showMillisDistribution(String, Option<org.apache.spark.util.Distribution>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
showMillisDistribution(String, Function2<TaskInfo, TaskMetrics, Object>, Seq<Tuple2<TaskInfo, TaskMetrics>>) - Static method in class org.apache.spark.scheduler.StatsReportListener
 
showMillisDistribution(String, Function1<BatchInfo, Option<Object>>) - Method in class org.apache.spark.streaming.scheduler.StatsReportListener
 
SHUFFLE() - Static method in class org.apache.spark.storage.BlockId
 
SHUFFLE_DATA() - Static method in class org.apache.spark.storage.BlockId
 
SHUFFLE_INDEX() - Static method in class org.apache.spark.storage.BlockId
 
SHUFFLE_READ() - Static method in class org.apache.spark.ui.ToolTips
 
SHUFFLE_READ_BLOCKED_TIME() - Static method in class org.apache.spark.ui.jobs.TaskDetailsClassNames
 
SHUFFLE_READ_BLOCKED_TIME() - Static method in class org.apache.spark.ui.ToolTips
 
SHUFFLE_READ_METRICS_PREFIX() - Static method in class org.apache.spark.InternalAccumulator
 
SHUFFLE_READ_REMOTE_SIZE() - Static method in class org.apache.spark.ui.jobs.TaskDetailsClassNames
 
SHUFFLE_READ_REMOTE_SIZE() - Static method in class org.apache.spark.ui.ToolTips
 
SHUFFLE_WRITE() - Static method in class org.apache.spark.ui.ToolTips
 
SHUFFLE_WRITE_METRICS_PREFIX() - Static method in class org.apache.spark.InternalAccumulator
 
ShuffleBlockId - Class in org.apache.spark.storage
 
ShuffleBlockId(int, int, int) - Constructor for class org.apache.spark.storage.ShuffleBlockId
 
ShuffleDataBlockId - Class in org.apache.spark.storage
 
ShuffleDataBlockId(int, int, int) - Constructor for class org.apache.spark.storage.ShuffleDataBlockId
 
ShuffleDependency<K,V,C> - Class in org.apache.spark
:: DeveloperApi :: Represents a dependency on the output of a shuffle stage.
ShuffleDependency(RDD<? extends Product2<K, V>>, Partitioner, Serializer, Option<Ordering<K>>, Option<Aggregator<K, V, C>>, boolean, ClassTag<K>, ClassTag<V>, ClassTag<C>) - Constructor for class org.apache.spark.ShuffleDependency
 
ShuffledRDD<K,V,C> - Class in org.apache.spark.rdd
:: DeveloperApi :: The resulting RDD from a shuffle (e.g. repartitioning of data).
ShuffledRDD(RDD<? extends Product2<K, V>>, Partitioner, ClassTag<K>, ClassTag<V>, ClassTag<C>) - Constructor for class org.apache.spark.rdd.ShuffledRDD
 
shuffleHandle() - Method in class org.apache.spark.ShuffleDependency
 
shuffleId() - Method in class org.apache.spark.CleanShuffle
 
shuffleId() - Method in class org.apache.spark.FetchFailed
 
shuffleId() - Method in class org.apache.spark.ShuffleDependency
 
shuffleId() - Method in class org.apache.spark.storage.BlockManagerMessages.RemoveShuffle
 
shuffleId() - Method in class org.apache.spark.storage.ShuffleBlockId
 
shuffleId() - Method in class org.apache.spark.storage.ShuffleDataBlockId
 
shuffleId() - Method in class org.apache.spark.storage.ShuffleIndexBlockId
 
ShuffleIndexBlockId - Class in org.apache.spark.storage
 
ShuffleIndexBlockId(int, int, int) - Constructor for class org.apache.spark.storage.ShuffleIndexBlockId
 
shuffleManager() - Method in class org.apache.spark.SparkEnv
 
shuffleRead() - Method in class org.apache.spark.status.api.v1.ExecutorStageSummary
 
shuffleRead() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
shuffleReadBytes() - Method in class org.apache.spark.status.api.v1.StageData
 
ShuffleReadMetricDistributions - Class in org.apache.spark.status.api.v1
 
ShuffleReadMetrics - Class in org.apache.spark.status.api.v1
 
shuffleReadMetrics() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
shuffleReadMetrics() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
shuffleReadRecords() - Method in class org.apache.spark.status.api.v1.StageData
 
shuffleReadRecords() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
shuffleReadRecords() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
shuffleReadTotalBytes() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
shuffleRegistered() - Method in class org.apache.spark.scheduler.cluster.mesos.Slave
 
shuffleWrite() - Method in class org.apache.spark.status.api.v1.ExecutorStageSummary
 
shuffleWrite() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
shuffleWriteBytes() - Method in class org.apache.spark.status.api.v1.StageData
 
shuffleWriteBytes() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
ShuffleWriteMetricDistributions - Class in org.apache.spark.status.api.v1
 
ShuffleWriteMetrics - Class in org.apache.spark.status.api.v1
 
shuffleWriteMetrics() - Method in class org.apache.spark.status.api.v1.TaskMetricDistributions
 
shuffleWriteMetrics() - Method in class org.apache.spark.status.api.v1.TaskMetrics
 
shuffleWriteRecords() - Method in class org.apache.spark.status.api.v1.StageData
 
shuffleWriteRecords() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
shuffleWriteRecords() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
ShutdownHookManager - Class in org.apache.spark.util
Various utility methods used by Spark.
ShutdownHookManager() - Constructor for class org.apache.spark.util.ShutdownHookManager
 
sideEffectResult() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
Inserts all the rows in the table into Hive.
sigma() - Method in class org.apache.spark.mllib.stat.distribution.MultivariateGaussian
 
sigmas() - Method in class org.apache.spark.mllib.clustering.ExpectationSum
 
SignalUtils - Class in org.apache.spark.util
Contains utilities for working with posix signals.
SignalUtils() - Constructor for class org.apache.spark.util.SignalUtils
 
signum(Column) - Static method in class org.apache.spark.sql.functions
Computes the signum of the given value.
signum(String) - Static method in class org.apache.spark.sql.functions
Computes the signum of the given column.
SimpleFutureAction<T> - Class in org.apache.spark
A FutureAction holding the result of an action that triggers a single job.
simpleString() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
simpleString() - Method in class org.apache.spark.sql.types.ArrayType
 
simpleString() - Static method in class org.apache.spark.sql.types.BinaryType
 
simpleString() - Static method in class org.apache.spark.sql.types.BooleanType
 
simpleString() - Method in class org.apache.spark.sql.types.ByteType
 
simpleString() - Static method in class org.apache.spark.sql.types.CalendarIntervalType
 
simpleString() - Method in class org.apache.spark.sql.types.DataType
Readable string representation for the type.
simpleString() - Static method in class org.apache.spark.sql.types.DateType
 
simpleString() - Method in class org.apache.spark.sql.types.DecimalType
 
simpleString() - Static method in class org.apache.spark.sql.types.DoubleType
 
simpleString() - Static method in class org.apache.spark.sql.types.FloatType
 
simpleString() - Method in class org.apache.spark.sql.types.IntegerType
 
simpleString() - Method in class org.apache.spark.sql.types.LongType
 
simpleString() - Method in class org.apache.spark.sql.types.MapType
 
simpleString() - Static method in class org.apache.spark.sql.types.NullType
 
simpleString() - Method in class org.apache.spark.sql.types.ShortType
 
simpleString() - Static method in class org.apache.spark.sql.types.StringType
 
simpleString() - Method in class org.apache.spark.sql.types.StructType
 
simpleString() - Static method in class org.apache.spark.sql.types.TimestampType
 
SimpleUpdater - Class in org.apache.spark.mllib.optimization
:: DeveloperApi :: A simple updater for gradient descent *without* any regularization.
SimpleUpdater() - Constructor for class org.apache.spark.mllib.optimization.SimpleUpdater
 
sin(Column) - Static method in class org.apache.spark.sql.functions
Computes the sine of the given value.
sin(String) - Static method in class org.apache.spark.sql.functions
Computes the sine of the given column.
SingularValueDecomposition<UType,VType> - Class in org.apache.spark.mllib.linalg
Represents singular value decomposition (SVD) factors.
SingularValueDecomposition(UType, Vector, VType) - Constructor for class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
sinh(Column) - Static method in class org.apache.spark.sql.functions
Computes the hyperbolic sine of the given value.
sinh(String) - Static method in class org.apache.spark.sql.functions
Computes the hyperbolic sine of the given column.
sinkStatus() - Method in interface org.apache.spark.sql.ContinuousQuery
Returns current status of the sink.
SinkStatus - Class in org.apache.spark.sql
:: Experimental :: Status and metrics of a streaming Sink.
size() - Method in class org.apache.spark.api.java.JavaUtils.SerializableMapWrapper
 
size() - Method in class org.apache.spark.ml.attribute.AttributeGroup
Size of the attribute group.
size() - Method in class org.apache.spark.ml.linalg.DenseVector
 
size() - Method in class org.apache.spark.ml.linalg.SparseVector
 
size() - Method in interface org.apache.spark.ml.linalg.Vector
Size of the vector.
size() - Method in class org.apache.spark.ml.param.ParamMap
Number of param pairs in this map.
size() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
size() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
size() - Method in interface org.apache.spark.mllib.linalg.Vector
Size of the vector.
size(Column) - Static method in class org.apache.spark.sql.functions
Returns length of array or map.
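A small sketch, assuming a Dataset<Row> `df` with an array column named "tags":

    import static org.apache.spark.sql.functions.*;

    df.select(size(col("tags")).alias("numTags")).show();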
size() - Method in interface org.apache.spark.sql.Row
Number of elements in the Row.
size() - Static method in class org.apache.spark.sql.types.StructType
 
size() - Method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
size() - Method in interface org.apache.spark.storage.memory.MemoryEntry
 
size() - Method in class org.apache.spark.storage.memory.SerializedMemoryEntry
 
SizeEstimator - Class in org.apache.spark.util
:: DeveloperApi :: Estimates the sizes of Java objects (number of bytes of memory they occupy), for use in memory-aware caches.
SizeEstimator() - Constructor for class org.apache.spark.util.SizeEstimator
 
sizeInBytes() - Method in class org.apache.spark.sql.sources.BaseRelation
Returns an estimated size of this relation in bytes.
sketch(RDD<K>, int, ClassTag<K>) - Static method in class org.apache.spark.RangePartitioner
Sketches the input RDD via reservoir sampling on each partition.
skewness(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the skewness of the values in a group.
skewness(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the skewness of the values in a group.
skip(long) - Method in class org.apache.spark.io.LZ4BlockInputStream
 
skip(long) - Method in class org.apache.spark.storage.BufferReleasingInputStream
 
skip(long) - Method in class org.apache.spark.util.io.ChunkedByteBufferInputStream
 
skippedStages() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
skipWhitespace() - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
Slave - Class in org.apache.spark.scheduler.cluster.mesos
 
Slave(String) - Constructor for class org.apache.spark.scheduler.cluster.mesos.Slave
 
slice(int, int) - Static method in class org.apache.spark.sql.types.StructType
 
slice(Time, Time) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
slice(Time, Time) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return all the RDDs from 'fromDuration' to 'toDuration' (both inclusive).
slice(Time, Time) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
slice(Time, Time) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
slice(Time, Time) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
slice(Time, Time) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
slice(Time, Time) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
slice(org.apache.spark.streaming.Interval) - Method in class org.apache.spark.streaming.dstream.DStream
Return all the RDDs defined by the Interval object (both end times included)
slice(Time, Time) - Method in class org.apache.spark.streaming.dstream.DStream
Return all the RDDs from 'fromTime' to 'toTime' (both inclusive).
slideDuration() - Method in class org.apache.spark.streaming.dstream.DStream
Time interval after which the DStream generates an RDD.
slideDuration() - Method in class org.apache.spark.streaming.dstream.InputDStream
 
sliding(int, int) - Method in class org.apache.spark.mllib.rdd.RDDFunctions
Returns an RDD created by grouping items of its parent RDD into fixed-size blocks by passing a sliding window over them.
sliding(int) - Method in class org.apache.spark.mllib.rdd.RDDFunctions
sliding(Int, Int) with step = 1.
sliding(int) - Static method in class org.apache.spark.sql.types.StructType
 
sliding(int, int) - Static method in class org.apache.spark.sql.types.StructType
 
smoothing() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
smoothing() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
SnappyCompressionCodec - Class in org.apache.spark.io
:: DeveloperApi :: Snappy implementation of CompressionCodec.
SnappyCompressionCodec(SparkConf) - Constructor for class org.apache.spark.io.SnappyCompressionCodec
 
SnappyOutputStreamWrapper - Class in org.apache.spark.io
Wrapper over SnappyOutputStream which guards against write-after-close and double-close issues.
SnappyOutputStreamWrapper(SnappyOutputStream) - Constructor for class org.apache.spark.io.SnappyOutputStreamWrapper
 
socketStream(String, int, Function<InputStream, Iterable<T>>, StorageLevel) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream from network source hostname:port.
socketStream(String, int, Function1<InputStream, Iterator<T>>, StorageLevel, ClassTag<T>) - Method in class org.apache.spark.streaming.StreamingContext
Creates an input stream from TCP source hostname:port.
socketTextStream(String, int, StorageLevel) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream from network source hostname:port.
socketTextStream(String, int) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream from network source hostname:port.
socketTextStream(String, int, StorageLevel) - Method in class org.apache.spark.streaming.StreamingContext
Creates an input stream from TCP source hostname:port.
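For illustration, a sketch that prints lines read from a TCP source; the JavaStreamingContext `jssc`, host, and port are assumptions:

    import org.apache.spark.storage.StorageLevel;
    import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;

    JavaReceiverInputDStream<String> lines =
        jssc.socketTextStream("localhost", 9999, StorageLevel.MEMORY_AND_DISK_SER());
    lines.print();   // print a few elements of each batch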
solve(double[], double[]) - Static method in class org.apache.spark.mllib.linalg.CholeskyDecomposition
Solves a symmetric positive definite linear system via Cholesky factorization.
solve(double[], double[], NNLS.Workspace) - Static method in class org.apache.spark.mllib.optimization.NNLS
Solve a least squares problem, possibly with nonnegativity constraints, by a modified projected gradient method.
solver() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
solver() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
solver() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
solver() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionTrainingSummary
 
solver() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
solver() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
Sort() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
sort(String, String...) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset sorted by the specified column, all in ascending order.
sort(Column...) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset sorted by the given expressions.
sort(String, Seq<String>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset sorted by the specified column, all in ascending order.
sort(Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset sorted by the given expressions.
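A hedged sketch of the two sort styles, assuming a Dataset<Row> `df` with columns "age" and "name":

    import static org.apache.spark.sql.functions.col;

    df.sort("age", "name");                    // ascending on both columns
    df.sort(col("age").desc(), col("name"));   // mixed ordering via Column expressions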
sort_array(Column) - Static method in class org.apache.spark.sql.functions
Sorts the input array for the given column in ascending order, according to the natural ordering of the array elements.
sort_array(Column, boolean) - Static method in class org.apache.spark.sql.functions
Sorts the input array for the given column in ascending / descending order, according to the natural ordering of the array elements.
sortBy(Function<T, S>, boolean, int) - Method in class org.apache.spark.api.java.JavaRDD
Return this RDD sorted by the given key function.
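For illustration, sorting by a derived key, assuming an existing JavaSparkContext `jsc`:

    import java.util.Arrays;
    import org.apache.spark.api.java.JavaRDD;

    JavaRDD<String> words = jsc.parallelize(Arrays.asList("spark", "ml", "graphx"));
    JavaRDD<String> byLength = words.sortBy(s -> s.length(), true, 2);  // ascending, 2 partitions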
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.api.r.RRDD
 
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.graphx.VertexRDD
 
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
sortBy(Function1<T, K>, boolean, int, Ordering<K>, ClassTag<K>) - Method in class org.apache.spark.rdd.RDD
Return this RDD sorted by the given key function.
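For example (assuming an existing SparkContext sc), sortBy orders an RDD by an arbitrary key function:

    val pairs = sc.parallelize(Seq(("spark", 3), ("rdd", 1), ("sort", 2)))
    // Sort by the count field, descending, into 2 partitions.
    val sorted = pairs.sortBy(_._2, ascending = false, numPartitions = 2)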
sortBy(String, String...) - Method in class org.apache.spark.sql.DataFrameWriter
Sorts the output in each bucket by the given columns.
sortBy(String, Seq<String>) - Method in class org.apache.spark.sql.DataFrameWriter
Sorts the output in each bucket by the given columns.
sortBy(Function1<A, B>, Ordering<B>) - Static method in class org.apache.spark.sql.types.StructType
 
sortBy$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
sortBy$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
sortBy$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
sortBy$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
sortBy$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
sortBy$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
sortBy$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
sortBy$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
sortBy$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
sortBy$default$3() - Static method in class org.apache.spark.api.r.RRDD
 
sortBy$default$3() - Static method in class org.apache.spark.graphx.EdgeRDD
 
sortBy$default$3() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
sortBy$default$3() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
sortBy$default$3() - Static method in class org.apache.spark.graphx.VertexRDD
 
sortBy$default$3() - Static method in class org.apache.spark.rdd.HadoopRDD
 
sortBy$default$3() - Static method in class org.apache.spark.rdd.JdbcRDD
 
sortBy$default$3() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
sortBy$default$3() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
sortByKey() - Method in class org.apache.spark.api.java.JavaPairRDD
Sort the RDD by key, so that each partition contains a sorted range of the elements in ascending order.
sortByKey(boolean) - Method in class org.apache.spark.api.java.JavaPairRDD
Sort the RDD by key, so that each partition contains a sorted range of the elements.
sortByKey(boolean, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Sort the RDD by key, so that each partition contains a sorted range of the elements.
sortByKey(Comparator<K>) - Method in class org.apache.spark.api.java.JavaPairRDD
Sort the RDD by key, so that each partition contains a sorted range of the elements.
sortByKey(Comparator<K>, boolean) - Method in class org.apache.spark.api.java.JavaPairRDD
Sort the RDD by key, so that each partition contains a sorted range of the elements.
sortByKey(Comparator<K>, boolean, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Sort the RDD by key, so that each partition contains a sorted range of the elements.
sortByKey(boolean, int) - Method in class org.apache.spark.rdd.OrderedRDDFunctions
Sort the RDD by key, so that each partition contains a sorted range of the elements.
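A small sketch (assuming a SparkContext sc); sortByKey range-partitions the output so each partition holds a contiguous, sorted key range:

    val rdd = sc.parallelize(Seq(("b", 2), ("a", 1), ("c", 3)))
    val asc  = rdd.sortByKey()                        // ascending (default)
    val desc = rdd.sortByKey(ascending = false, 2)    // descending, 2 partitions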
sorted(Ordering<B>) - Static method in class org.apache.spark.sql.types.StructType
 
sortWith(Function2<A, A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
sortWithinPartitions(String, String...) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with each partition sorted by the given expressions.
sortWithinPartitions(Column...) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with each partition sorted by the given expressions.
sortWithinPartitions(String, Seq<String>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with each partition sorted by the given expressions.
sortWithinPartitions(Seq<Column>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with each partition sorted by the given expressions.
soundex(Column) - Static method in class org.apache.spark.sql.functions
Returns the soundex code for the specified expression.
sourceSchema(SQLContext, Option<StructType>, String, Map<String, String>) - Method in interface org.apache.spark.sql.sources.StreamSourceProvider
Returns the name and schema of the source that can be used to continually read data.
SourceStatus - Class in org.apache.spark.sql
:: Experimental :: Status and metrics of a streaming Source.
sourceStatuses() - Method in interface org.apache.spark.sql.ContinuousQuery
Returns current status of all the sources.
sourceToSerDe(String, SQLConf) - Static method in class org.apache.spark.sql.internal.HiveSerDe
Get the Hive SerDe information from the data source abbreviation string or classname.
span(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
spark() - Method in class org.apache.spark.status.api.v1.VersionInfo
 
SPARK_CONTEXT_SHUTDOWN_PRIORITY() - Static method in class org.apache.spark.util.ShutdownHookManager
The shutdown priority of the SparkContext instance.
SPARK_MASTER - Static variable in class org.apache.spark.launcher.SparkLauncher
The Spark master.
spark_partition_id() - Static method in class org.apache.spark.sql.functions
Partition ID of the Spark task.
SPARK_REGEX() - Static method in class org.apache.spark.SparkMasterRegex
 
SparkAppHandle - Interface in org.apache.spark.launcher
A handle to a running Spark application.
SparkAppHandle.Listener - Interface in org.apache.spark.launcher
Listener for updates to a handle's state.
SparkAppHandle.State - Enum in org.apache.spark.launcher
Represents the application's state.
SparkConf - Class in org.apache.spark
Configuration for a Spark application.
SparkConf(boolean) - Constructor for class org.apache.spark.SparkConf
 
SparkConf() - Constructor for class org.apache.spark.SparkConf
Create a SparkConf that loads defaults from system properties and the classpath
sparkContext() - Static method in class org.apache.spark.api.r.RRDD
 
sparkContext() - Static method in class org.apache.spark.graphx.EdgeRDD
 
sparkContext() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
sparkContext() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
sparkContext() - Static method in class org.apache.spark.graphx.VertexRDD
 
sparkContext() - Static method in class org.apache.spark.rdd.HadoopRDD
 
sparkContext() - Static method in class org.apache.spark.rdd.JdbcRDD
 
sparkContext() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
sparkContext() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
sparkContext() - Method in class org.apache.spark.rdd.RDD
The SparkContext that created this RDD.
SparkContext - Class in org.apache.spark
Main entry point for Spark functionality.
SparkContext(SparkConf) - Constructor for class org.apache.spark.SparkContext
 
SparkContext() - Constructor for class org.apache.spark.SparkContext
Create a SparkContext that loads settings from system properties (for instance, when launching with ./bin/spark-submit).
SparkContext(String, String, SparkConf) - Constructor for class org.apache.spark.SparkContext
Alternative constructor that allows setting common Spark properties directly
SparkContext(String, String, String, Seq<String>, Map<String, String>) - Constructor for class org.apache.spark.SparkContext
Alternative constructor that allows setting common Spark properties directly
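As a minimal sketch of the usual construction path (the application name and master URL are placeholder values):

    import org.apache.spark.{SparkConf, SparkContext}

    val conf = new SparkConf()
      .setAppName("example-app")
      .setMaster("local[*]")
    val sc = new SparkContext(conf)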
sparkContext() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
sparkContext() - Method in class org.apache.spark.sql.SparkSession
 
sparkContext() - Method in class org.apache.spark.sql.SQLContext
 
sparkContext() - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
The underlying SparkContext
sparkContext() - Method in class org.apache.spark.streaming.StreamingContext
Return the associated Spark context
SparkEnv - Class in org.apache.spark
:: DeveloperApi :: Holds all the runtime environment objects for a running Spark instance (either master or worker), including the serializer, RpcEnv, block manager, map output tracker, etc.
SparkEnv(String, org.apache.spark.rpc.RpcEnv, Serializer, Serializer, org.apache.spark.serializer.SerializerManager, MapOutputTracker, ShuffleManager, org.apache.spark.broadcast.BroadcastManager, org.apache.spark.storage.BlockManager, SecurityManager, org.apache.spark.metrics.MetricsSystem, MemoryManager, org.apache.spark.scheduler.OutputCommitCoordinator, SparkConf) - Constructor for class org.apache.spark.SparkEnv
 
sparkEventFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
JSON deserialization methods for SparkListenerEvents.
sparkEventToJson(SparkListenerEvent) - Static method in class org.apache.spark.util.JsonProtocol
JSON serialization methods for SparkListenerEvents.
SparkException - Exception in org.apache.spark
 
SparkException(String, Throwable) - Constructor for exception org.apache.spark.SparkException
 
SparkException(String) - Constructor for exception org.apache.spark.SparkException
 
SparkExecutorInfo - Interface in org.apache.spark
Exposes information about Spark Executors.
SparkExecutorInfoImpl - Class in org.apache.spark
 
SparkExecutorInfoImpl(String, int, long, int) - Constructor for class org.apache.spark.SparkExecutorInfoImpl
 
SparkExitCode - Class in org.apache.spark.util
 
SparkExitCode() - Constructor for class org.apache.spark.util.SparkExitCode
 
SparkFiles - Class in org.apache.spark
Resolves paths to files added through SparkContext.addFile().
SparkFiles() - Constructor for class org.apache.spark.SparkFiles
 
SparkFirehoseListener - Class in org.apache.spark
Class that allows users to receive all SparkListener events.
SparkFirehoseListener() - Constructor for class org.apache.spark.SparkFirehoseListener
 
SparkFlumeEvent - Class in org.apache.spark.streaming.flume
A wrapper class for AvroFlumeEvent instances with a custom serialization format.
SparkFlumeEvent() - Constructor for class org.apache.spark.streaming.flume.SparkFlumeEvent
 
SparkHadoopMapRedUtil - Class in org.apache.spark.mapred
 
SparkHadoopMapRedUtil() - Constructor for class org.apache.spark.mapred.SparkHadoopMapRedUtil
 
sparkJavaOpts(SparkConf, Function1<String, Object>) - Static method in class org.apache.spark.util.Utils
Convert all spark properties set in the given SparkConf to a sequence of java options.
SparkJobInfo - Interface in org.apache.spark
Exposes information about Spark Jobs.
SparkJobInfoImpl - Class in org.apache.spark
 
SparkJobInfoImpl(int, int[], JobExecutionStatus) - Constructor for class org.apache.spark.SparkJobInfoImpl
 
SparkLauncher - Class in org.apache.spark.launcher
Launcher for Spark applications.
SparkLauncher() - Constructor for class org.apache.spark.launcher.SparkLauncher
 
SparkLauncher(Map<String, String>) - Constructor for class org.apache.spark.launcher.SparkLauncher
Creates a launcher that will set the given environment variables in the child.
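For illustration, a sketch of launching a child application with SparkLauncher; the jar path and main class are placeholders:

    import org.apache.spark.launcher.SparkLauncher

    val handle = new SparkLauncher()
      .setAppResource("/path/to/app.jar")      // placeholder path
      .setMainClass("com.example.Main")        // placeholder class
      .setMaster("local[*]")
      .startApplication()                      // returns a SparkAppHandle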
SparkListener - Class in org.apache.spark.scheduler
:: DeveloperApi :: A default implementation for SparkListenerInterface that has no-op implementations for all callbacks.
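As a sketch (assuming an existing SparkContext sc), only the callbacks of interest need to be overridden:

    import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd}

    sc.addSparkListener(new SparkListener {
      override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit =
        println(s"task finished in stage ${taskEnd.stageId}")
    })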
SparkListener() - Constructor for class org.apache.spark.scheduler.SparkListener
 
SparkListenerApplicationEnd - Class in org.apache.spark.scheduler
 
SparkListenerApplicationEnd(long) - Constructor for class org.apache.spark.scheduler.SparkListenerApplicationEnd
 
SparkListenerApplicationStart - Class in org.apache.spark.scheduler
 
SparkListenerApplicationStart(String, Option<String>, long, String, Option<String>, Option<Map<String, String>>) - Constructor for class org.apache.spark.scheduler.SparkListenerApplicationStart
 
SparkListenerBlockManagerAdded - Class in org.apache.spark.scheduler
 
SparkListenerBlockManagerAdded(long, BlockManagerId, long) - Constructor for class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
SparkListenerBlockManagerRemoved - Class in org.apache.spark.scheduler
 
SparkListenerBlockManagerRemoved(long, BlockManagerId) - Constructor for class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
SparkListenerBlockUpdated - Class in org.apache.spark.scheduler
 
SparkListenerBlockUpdated(BlockUpdatedInfo) - Constructor for class org.apache.spark.scheduler.SparkListenerBlockUpdated
 
SparkListenerEnvironmentUpdate - Class in org.apache.spark.scheduler
 
SparkListenerEnvironmentUpdate(Map<String, Seq<Tuple2<String, String>>>) - Constructor for class org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
 
SparkListenerEvent - Interface in org.apache.spark.scheduler
 
SparkListenerExecutorAdded - Class in org.apache.spark.scheduler
 
SparkListenerExecutorAdded(long, String, ExecutorInfo) - Constructor for class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
SparkListenerExecutorMetricsUpdate - Class in org.apache.spark.scheduler
Periodic updates from executors.
SparkListenerExecutorMetricsUpdate(String, Seq<Tuple4<Object, Object, Object, Seq<AccumulableInfo>>>) - Constructor for class org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate
 
SparkListenerExecutorRemoved - Class in org.apache.spark.scheduler
 
SparkListenerExecutorRemoved(long, String, String) - Constructor for class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
SparkListenerJobEnd - Class in org.apache.spark.scheduler
 
SparkListenerJobEnd(int, long, JobResult) - Constructor for class org.apache.spark.scheduler.SparkListenerJobEnd
 
SparkListenerJobStart - Class in org.apache.spark.scheduler
 
SparkListenerJobStart(int, long, Seq<StageInfo>, Properties) - Constructor for class org.apache.spark.scheduler.SparkListenerJobStart
 
SparkListenerStageCompleted - Class in org.apache.spark.scheduler
 
SparkListenerStageCompleted(StageInfo) - Constructor for class org.apache.spark.scheduler.SparkListenerStageCompleted
 
SparkListenerStageSubmitted - Class in org.apache.spark.scheduler
 
SparkListenerStageSubmitted(StageInfo, Properties) - Constructor for class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
SparkListenerTaskEnd - Class in org.apache.spark.scheduler
 
SparkListenerTaskEnd(int, int, String, TaskEndReason, TaskInfo, TaskMetrics) - Constructor for class org.apache.spark.scheduler.SparkListenerTaskEnd
 
SparkListenerTaskGettingResult - Class in org.apache.spark.scheduler
 
SparkListenerTaskGettingResult(TaskInfo) - Constructor for class org.apache.spark.scheduler.SparkListenerTaskGettingResult
 
SparkListenerTaskStart - Class in org.apache.spark.scheduler
 
SparkListenerTaskStart(int, int, TaskInfo) - Constructor for class org.apache.spark.scheduler.SparkListenerTaskStart
 
SparkListenerUnpersistRDD - Class in org.apache.spark.scheduler
 
SparkListenerUnpersistRDD(int) - Constructor for class org.apache.spark.scheduler.SparkListenerUnpersistRDD
 
SparkMasterRegex - Class in org.apache.spark
A collection of regexes for extracting information from the master string.
SparkMasterRegex() - Constructor for class org.apache.spark.SparkMasterRegex
 
SparkOrcNewRecordReader - Class in org.apache.hadoop.hive.ql.io.orc
This is based on hive-exec-1.2.1 OrcNewInputFormat.OrcRecordReader.
SparkOrcNewRecordReader(Reader, Configuration, long, long) - Constructor for class org.apache.hadoop.hive.ql.io.orc.SparkOrcNewRecordReader
 
sparkProperties() - Method in class org.apache.spark.ui.env.EnvironmentListener
 
sparkRPackagePath(boolean) - Static method in class org.apache.spark.api.r.RUtils
Get the list of paths for R packages in various deployment modes, of which the first path is for the SparkR package itself.
sparkSession() - Static method in class org.apache.spark.ml.r.RWrappers
 
sparkSession() - Method in interface org.apache.spark.sql.ContinuousQuery
Returns the SparkSession associated with this.
sparkSession() - Method in class org.apache.spark.sql.Dataset
 
SparkSession - Class in org.apache.spark.sql
The entry point to programming Spark with the Dataset and DataFrame API.
sparkSession() - Method in class org.apache.spark.sql.SQLContext
 
SparkSession.Builder - Class in org.apache.spark.sql
Builder for SparkSession.
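A minimal sketch of obtaining a session through the builder (the app name and master are placeholders):

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("example")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._   // enables toDF/toDS on local Scala collections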
SparkSession.Builder() - Constructor for class org.apache.spark.sql.SparkSession.Builder
 
SparkSession.implicits$ - Class in org.apache.spark.sql
:: Experimental :: (Scala-specific) Implicit methods available in Scala for converting common Scala objects into DataFrames.
SparkSession.implicits$() - Constructor for class org.apache.spark.sql.SparkSession.implicits$
 
SparkShutdownHook - Class in org.apache.spark.util
 
SparkShutdownHook(int, Function0<BoxedUnit>) - Constructor for class org.apache.spark.util.SparkShutdownHook
 
SparkStageInfo - Interface in org.apache.spark
Exposes information about Spark Stages.
SparkStageInfoImpl - Class in org.apache.spark
 
SparkStageInfoImpl(int, int, long, String, int, int, int, int) - Constructor for class org.apache.spark.SparkStageInfoImpl
 
SparkStatusTracker - Class in org.apache.spark
Low-level status reporting APIs for monitoring job and stage progress.
SparkUncaughtExceptionHandler - Class in org.apache.spark.util
The default uncaught exception handler for Executors terminates the whole process, to avoid getting into a bad state indefinitely.
SparkUncaughtExceptionHandler() - Constructor for class org.apache.spark.util.SparkUncaughtExceptionHandler
 
sparkUser() - Method in class org.apache.spark.api.java.JavaSparkContext
 
sparkUser() - Method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
sparkUser() - Method in class org.apache.spark.SparkContext
 
sparkUser() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
sparse(int, int, int[], int[], double[]) - Static method in class org.apache.spark.ml.linalg.Matrices
Creates a column-major sparse matrix in Compressed Sparse Column (CSC) format.
sparse(int, int[], double[]) - Static method in class org.apache.spark.ml.linalg.Vectors
Creates a sparse vector providing its index array and value array.
sparse(int, Seq<Tuple2<Object, Object>>) - Static method in class org.apache.spark.ml.linalg.Vectors
Creates a sparse vector using unordered (index, value) pairs.
sparse(int, Iterable<Tuple2<Integer, Double>>) - Static method in class org.apache.spark.ml.linalg.Vectors
Creates a sparse vector using unordered (index, value) pairs in a Java friendly way.
sparse(int, int, int[], int[], double[]) - Static method in class org.apache.spark.mllib.linalg.Matrices
Creates a column-major sparse matrix in Compressed Sparse Column (CSC) format.
sparse(int, int[], double[]) - Static method in class org.apache.spark.mllib.linalg.Vectors
Creates a sparse vector providing its index array and value array.
sparse(int, Seq<Tuple2<Object, Object>>) - Static method in class org.apache.spark.mllib.linalg.Vectors
Creates a sparse vector using unordered (index, value) pairs.
sparse(int, Iterable<Tuple2<Integer, Double>>) - Static method in class org.apache.spark.mllib.linalg.Vectors
Creates a sparse vector using unordered (index, value) pairs in a Java friendly way.
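For example, using the spark.ml linear algebra factories (the values shown are arbitrary):

    import org.apache.spark.ml.linalg.{Matrices, Vectors}

    // Length-5 vector with nonzeros at indices 1 and 3.
    val v1 = Vectors.sparse(5, Array(1, 3), Array(2.0, 4.0))
    val v2 = Vectors.sparse(5, Seq((1, 2.0), (3, 4.0)))
    // 3x2 CSC matrix: column pointers, row indices, values.
    val m = Matrices.sparse(3, 2, Array(0, 1, 2), Array(0, 2), Array(9.0, 6.0))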
SparseMatrix - Class in org.apache.spark.ml.linalg
Column-major sparse matrix.
SparseMatrix(int, int, int[], int[], double[], boolean) - Constructor for class org.apache.spark.ml.linalg.SparseMatrix
 
SparseMatrix(int, int, int[], int[], double[]) - Constructor for class org.apache.spark.ml.linalg.SparseMatrix
Column-major sparse matrix.
SparseMatrix - Class in org.apache.spark.mllib.linalg
Column-major sparse matrix.
SparseMatrix(int, int, int[], int[], double[], boolean) - Constructor for class org.apache.spark.mllib.linalg.SparseMatrix
 
SparseMatrix(int, int, int[], int[], double[]) - Constructor for class org.apache.spark.mllib.linalg.SparseMatrix
Column-major sparse matrix.
SparseVector - Class in org.apache.spark.ml.linalg
A sparse vector represented by an index array and a value array.
SparseVector(int, int[], double[]) - Constructor for class org.apache.spark.ml.linalg.SparseVector
 
SparseVector - Class in org.apache.spark.mllib.linalg
A sparse vector represented by an index array and a value array.
SparseVector(int, int[], double[]) - Constructor for class org.apache.spark.mllib.linalg.SparseVector
 
SPARSITY() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
sparsity() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
spdiag(Vector) - Static method in class org.apache.spark.ml.linalg.SparseMatrix
Generate a diagonal matrix in SparseMatrix format from the supplied values.
spdiag(Vector) - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
Generate a diagonal matrix in SparseMatrix format from the supplied values.
SpearmanCorrelation - Class in org.apache.spark.mllib.stat.correlation
Compute Spearman's correlation for two RDDs of the type RDD[Double] or the correlation matrix for an RDD of the type RDD[Vector].
SpearmanCorrelation() - Constructor for class org.apache.spark.mllib.stat.correlation.SpearmanCorrelation
 
SpecialLengths - Class in org.apache.spark.api.r
 
SpecialLengths() - Constructor for class org.apache.spark.api.r.SpecialLengths
 
speculative() - Method in class org.apache.spark.scheduler.TaskInfo
 
speculative() - Method in class org.apache.spark.status.api.v1.TaskData
 
speye(int) - Static method in class org.apache.spark.ml.linalg.Matrices
Generate a sparse Identity Matrix in Matrix format.
speye(int) - Static method in class org.apache.spark.ml.linalg.SparseMatrix
Generate an Identity Matrix in SparseMatrix format.
speye(int) - Static method in class org.apache.spark.mllib.linalg.Matrices
Generate a sparse Identity Matrix in Matrix format.
speye(int) - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
Generate an Identity Matrix in SparseMatrix format.
SpillListener - Class in org.apache.spark
A SparkListener that detects whether spills have occurred in Spark jobs.
SpillListener() - Constructor for class org.apache.spark.SpillListener
 
split() - Method in class org.apache.spark.ml.tree.DecisionTreeModelReadWrite.NodeData
 
split() - Method in class org.apache.spark.ml.tree.InternalNode
 
Split - Interface in org.apache.spark.ml.tree
:: DeveloperApi :: Interface for a "Split," which specifies a test made at a decision tree node to choose the left or right path.
split() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
split() - Method in class org.apache.spark.mllib.tree.model.Node
 
Split - Class in org.apache.spark.mllib.tree.model
:: DeveloperApi :: Split applied to a feature. param: feature Feature index. param: threshold Threshold for continuous feature.
Split(int, double, Enumeration.Value, List<Object>) - Constructor for class org.apache.spark.mllib.tree.model.Split
 
split(Column, String) - Static method in class org.apache.spark.sql.functions
Splits str around pattern (pattern is a regular expression).
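For example (assuming a DataFrame df with a string column named csv):

    import org.apache.spark.sql.functions.split

    // The second argument is a regular expression; "," splits on commas.
    val parts = df.select(split(df("csv"), ",").as("fields"))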
splitAndCountPartitions(Iterator<String>) - Static method in class org.apache.spark.streaming.util.RawTextHelper
Splits lines and counts the words.
splitAt(int) - Static method in class org.apache.spark.sql.types.StructType
 
splitCommandString(String) - Static method in class org.apache.spark.util.Utils
Split a string of potentially quoted arguments from the command line the way that a shell would do it to determine arguments to a command.
splitIndex() - Method in class org.apache.spark.storage.RDDBlockId
 
SplitInfo - Class in org.apache.spark.scheduler
 
SplitInfo(Class<?>, String, String, long, Object) - Constructor for class org.apache.spark.scheduler.SplitInfo
 
splits() - Method in class org.apache.spark.ml.feature.Bucketizer
Parameter for mapping continuous features into buckets.
spr(double, Vector, DenseVector) - Static method in class org.apache.spark.ml.linalg.BLAS
Adds alpha * x * x.t to a matrix in-place.
spr(double, Vector, double[]) - Static method in class org.apache.spark.ml.linalg.BLAS
Adds alpha * x * x.t to a matrix in-place.
spr(double, Vector, DenseVector) - Static method in class org.apache.spark.mllib.linalg.BLAS
Adds alpha * v * v.t to a matrix in-place.
spr(double, Vector, double[]) - Static method in class org.apache.spark.mllib.linalg.BLAS
Adds alpha * v * v.t to a matrix in-place.
sprand(int, int, double, Random) - Static method in class org.apache.spark.ml.linalg.Matrices
Generate a SparseMatrix consisting of i.i.d. uniform random numbers.
sprand(int, int, double, Random) - Static method in class org.apache.spark.ml.linalg.SparseMatrix
Generate a SparseMatrix consisting of i.i.d. uniform random numbers.
sprand(int, int, double, Random) - Static method in class org.apache.spark.mllib.linalg.Matrices
Generate a SparseMatrix consisting of i.i.d. uniform random numbers.
sprand(int, int, double, Random) - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
Generate a SparseMatrix consisting of i.i.d. uniform random numbers.
sprandn(int, int, double, Random) - Static method in class org.apache.spark.ml.linalg.Matrices
Generate a SparseMatrix consisting of i.i.d. gaussian random numbers.
sprandn(int, int, double, Random) - Static method in class org.apache.spark.ml.linalg.SparseMatrix
Generate a SparseMatrix consisting of i.i.d. gaussian random numbers.
sprandn(int, int, double, Random) - Static method in class org.apache.spark.mllib.linalg.Matrices
Generate a SparseMatrix consisting of i.i.d. gaussian random numbers.
sprandn(int, int, double, Random) - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
Generate a SparseMatrix consisting of i.i.d. gaussian random numbers.
sqdist(Vector, Vector) - Static method in class org.apache.spark.ml.linalg.Vectors
Returns the squared distance between two Vectors.
sqdist(Vector, Vector) - Static method in class org.apache.spark.mllib.linalg.Vectors
Returns the squared distance between two Vectors.
sql(String) - Method in class org.apache.spark.sql.SparkSession
Executes a SQL query using Spark, returning the result as a DataFrame.
sql(String) - Method in class org.apache.spark.sql.SQLContext
Executes a SQL query using Spark, returning the result as a DataFrame.
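As a sketch (assuming a SparkSession spark and a DataFrame people with name and age columns):

    // Register a temporary view, then query it with SQL.
    people.createOrReplaceTempView("people")
    val adults = spark.sql("SELECT name FROM people WHERE age >= 18")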
sql() - Method in class org.apache.spark.sql.types.ArrayType
 
sql() - Static method in class org.apache.spark.sql.types.BinaryType
 
sql() - Static method in class org.apache.spark.sql.types.BooleanType
 
sql() - Static method in class org.apache.spark.sql.types.ByteType
 
sql() - Static method in class org.apache.spark.sql.types.CalendarIntervalType
 
sql() - Method in class org.apache.spark.sql.types.DataType
 
sql() - Static method in class org.apache.spark.sql.types.DateType
 
sql() - Method in class org.apache.spark.sql.types.DecimalType
 
sql() - Static method in class org.apache.spark.sql.types.DoubleType
 
sql() - Static method in class org.apache.spark.sql.types.FloatType
 
sql() - Static method in class org.apache.spark.sql.types.IntegerType
 
sql() - Static method in class org.apache.spark.sql.types.LongType
 
sql() - Method in class org.apache.spark.sql.types.MapType
 
sql() - Static method in class org.apache.spark.sql.types.NullType
 
sql() - Static method in class org.apache.spark.sql.types.NumericType
 
sql() - Static method in class org.apache.spark.sql.types.ShortType
 
sql() - Static method in class org.apache.spark.sql.types.StringType
 
sql() - Method in class org.apache.spark.sql.types.StructType
 
sql() - Static method in class org.apache.spark.sql.types.TimestampType
 
sqlContext() - Static method in class org.apache.spark.ml.r.RWrappers
 
sqlContext() - Method in class org.apache.spark.sql.Dataset
 
sqlContext() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
sqlContext() - Method in class org.apache.spark.sql.sources.BaseRelation
 
SQLContext - Class in org.apache.spark.sql
The entry point for working with structured data (rows and columns) in Spark, in Spark 1.x.
SQLContext(SparkContext) - Constructor for class org.apache.spark.sql.SQLContext
 
SQLContext(JavaSparkContext) - Constructor for class org.apache.spark.sql.SQLContext
 
SQLContext.implicits$ - Class in org.apache.spark.sql
:: Experimental :: (Scala-specific) Implicit methods available in Scala for converting common Scala objects into DataFrames.
SQLContext.implicits$() - Constructor for class org.apache.spark.sql.SQLContext.implicits$
 
SQLImplicits - Class in org.apache.spark.sql
A collection of implicit methods for converting common Scala objects into DataFrames.
SQLImplicits() - Constructor for class org.apache.spark.sql.SQLImplicits
 
SQLImplicits.StringToColumn - Class in org.apache.spark.sql
Converts $"col name" into a Column.
SQLImplicits.StringToColumn(StringContext) - Constructor for class org.apache.spark.sql.SQLImplicits.StringToColumn
 
sqlSerDe() - Static method in class org.apache.spark.api.r.SerDe
 
SQLTransformer - Class in org.apache.spark.ml.feature
:: Experimental :: Implements the transformations which are defined by a SQL statement.
SQLTransformer(String) - Constructor for class org.apache.spark.ml.feature.SQLTransformer
 
SQLTransformer() - Constructor for class org.apache.spark.ml.feature.SQLTransformer
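For illustration (assuming a DataFrame df with numeric columns v1 and v2); the __THIS__ placeholder is replaced by the input dataset at transform time:

    import org.apache.spark.ml.feature.SQLTransformer

    val sqlTrans = new SQLTransformer()
      .setStatement("SELECT *, (v1 + v2) AS v3 FROM __THIS__")
    val out = sqlTrans.transform(df)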
 
sqlType() - Method in class org.apache.spark.mllib.linalg.VectorUDT
 
SQLUserDefinedType - Annotation Type in org.apache.spark.sql.types
:: DeveloperApi :: A user-defined type which can be automatically recognized by a SQLContext and registered.
SQLUtils - Class in org.apache.spark.sql.api.r
 
SQLUtils() - Constructor for class org.apache.spark.sql.api.r.SQLUtils
 
sqrt(Column) - Static method in class org.apache.spark.sql.functions
Computes the square root of the specified float value.
sqrt(String) - Static method in class org.apache.spark.sql.functions
Computes the square root of the specified float value.
SquaredError - Class in org.apache.spark.mllib.tree.loss
:: DeveloperApi :: Class for squared error loss calculation.
SquaredError() - Constructor for class org.apache.spark.mllib.tree.loss.SquaredError
 
SquaredL2Updater - Class in org.apache.spark.mllib.optimization
:: DeveloperApi :: Updater for L2 regularized problems.
SquaredL2Updater() - Constructor for class org.apache.spark.mllib.optimization.SquaredL2Updater
 
Src - Static variable in class org.apache.spark.graphx.TripletFields
Expose the source and edge fields but not the destination field.
srcAttr() - Method in class org.apache.spark.graphx.EdgeContext
The vertex attribute of the edge's source vertex.
srcAttr() - Method in class org.apache.spark.graphx.EdgeTriplet
The source vertex attribute
srcAttr() - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
srcId() - Method in class org.apache.spark.graphx.Edge
 
srcId() - Method in class org.apache.spark.graphx.EdgeContext
The vertex id of the edge's source vertex.
srcId() - Method in class org.apache.spark.graphx.impl.AggregatingEdgeContext
 
srdd() - Method in class org.apache.spark.api.java.JavaDoubleRDD
 
ssc() - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
 
stackTrace() - Method in class org.apache.spark.ExceptionFailure
 
stackTraceFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
stackTraceToJson(StackTraceElement[]) - Static method in class org.apache.spark.util.JsonProtocol
 
stage() - Method in class org.apache.spark.scheduler.AskPermissionToCommitOutput
 
STAGE_DAG() - Static method in class org.apache.spark.ui.ToolTips
 
STAGE_TIMELINE() - Static method in class org.apache.spark.ui.ToolTips
 
stageAttemptId() - Method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
stageAttemptId() - Method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
stageCompletedFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
stageCompletedToJson(SparkListenerStageCompleted) - Static method in class org.apache.spark.util.JsonProtocol
 
StageData - Class in org.apache.spark.status.api.v1
 
stageFailed(String) - Method in class org.apache.spark.scheduler.StageInfo
 
stageId() - Method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
stageId() - Method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
stageId() - Method in class org.apache.spark.scheduler.StageInfo
 
stageId() - Method in interface org.apache.spark.SparkStageInfo
 
stageId() - Method in class org.apache.spark.SparkStageInfoImpl
 
stageId() - Method in class org.apache.spark.status.api.v1.StageData
 
stageId() - Method in class org.apache.spark.TaskContext
The ID of the stage that this task belongs to.
stageIds() - Method in class org.apache.spark.scheduler.SparkListenerJobStart
 
stageIds() - Method in interface org.apache.spark.SparkJobInfo
 
stageIds() - Method in class org.apache.spark.SparkJobInfoImpl
 
stageIds() - Method in class org.apache.spark.status.api.v1.JobData
 
stageIds() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
stageIdToActiveJobIds() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
stageIdToData() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
stageIdToInfo() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
stageInfo() - Method in class org.apache.spark.scheduler.SparkListenerStageCompleted
 
stageInfo() - Method in class org.apache.spark.scheduler.SparkListenerStageSubmitted
 
StageInfo - Class in org.apache.spark.scheduler
:: DeveloperApi :: Stores information about a stage to pass from the scheduler to SparkListeners.
StageInfo(int, int, String, int, Seq<RDDInfo>, Seq<Object>, String, TaskMetrics, Seq<Seq<TaskLocation>>) - Constructor for class org.apache.spark.scheduler.StageInfo
 
stageInfoFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
JSON deserialization methods for the classes that SparkListenerEvents depend on.
stageInfos() - Method in class org.apache.spark.scheduler.SparkListenerJobStart
 
stageInfoToJson(StageInfo) - Static method in class org.apache.spark.util.JsonProtocol
JSON serialization methods for the classes that SparkListenerEvents depend on.
stages() - Method in class org.apache.spark.ml.Pipeline
param for pipeline stages
stages() - Method in class org.apache.spark.ml.PipelineModel
 
StageStatus - Enum in org.apache.spark.status.api.v1
 
stageSubmittedFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
stageSubmittedToJson(SparkListenerStageSubmitted) - Static method in class org.apache.spark.util.JsonProtocol
 
stagingDir() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
standardization() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
standardization() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
standardization() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
standardization() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
StandardNormalGenerator - Class in org.apache.spark.mllib.random
:: DeveloperApi :: Generates i.i.d. samples from the standard normal distribution.
StandardNormalGenerator() - Constructor for class org.apache.spark.mllib.random.StandardNormalGenerator
 
StandardScaler - Class in org.apache.spark.ml.feature
:: Experimental :: Standardizes features by removing the mean and scaling to unit variance using column summary statistics on the samples in the training set.
StandardScaler(String) - Constructor for class org.apache.spark.ml.feature.StandardScaler
 
StandardScaler() - Constructor for class org.apache.spark.ml.feature.StandardScaler
 
StandardScaler - Class in org.apache.spark.mllib.feature
Standardizes features by removing the mean and scaling to unit std using column summary statistics on the samples in the training set.
StandardScaler(boolean, boolean) - Constructor for class org.apache.spark.mllib.feature.StandardScaler
 
StandardScaler() - Constructor for class org.apache.spark.mllib.feature.StandardScaler
 
StandardScalerModel - Class in org.apache.spark.ml.feature
:: Experimental :: Model fitted by StandardScaler.
StandardScalerModel - Class in org.apache.spark.mllib.feature
Represents a StandardScaler model that can transform vectors.
StandardScalerModel(Vector, Vector, boolean, boolean) - Constructor for class org.apache.spark.mllib.feature.StandardScalerModel
 
StandardScalerModel(Vector, Vector) - Constructor for class org.apache.spark.mllib.feature.StandardScalerModel
 
StandardScalerModel(Vector) - Constructor for class org.apache.spark.mllib.feature.StandardScalerModel
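A minimal sketch of the spark.ml variant (assuming a DataFrame data with a Vector column named features):

    import org.apache.spark.ml.feature.StandardScaler

    val scaler = new StandardScaler()
      .setInputCol("features")
      .setOutputCol("scaledFeatures")
      .setWithMean(false)   // leaving the mean in place keeps sparse vectors sparse
      .setWithStd(true)
    val scaled = scaler.fit(data).transform(data)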
 
starGraph(SparkContext, int) - Static method in class org.apache.spark.graphx.util.GraphGenerators
Create a star graph with vertex 0 being the center.
start() - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Start the execution of the streams.
start() - Method in class org.apache.spark.streaming.dstream.ConstantInputDStream
 
start() - Method in class org.apache.spark.streaming.dstream.InputDStream
Method called to start receiving data.
start() - Method in class org.apache.spark.streaming.dstream.ReceiverInputDStream
 
start() - Method in class org.apache.spark.streaming.StreamingContext
Start the execution of the streams.
startApplication(SparkAppHandle.Listener...) - Method in class org.apache.spark.launcher.SparkLauncher
Starts a Spark application.
startIndexInLevel(int) - Static method in class org.apache.spark.mllib.tree.model.Node
Return the index of the first node in the given level.
startJettyServer(String, int, org.apache.spark.SSLOptions, Seq<ServletContextHandler>, SparkConf, String) - Static method in class org.apache.spark.ui.JettyUtils
Attempt to start a Jetty server bound to the supplied hostName:port using the given context handlers.
startOffset() - Method in exception org.apache.spark.sql.ContinuousQueryException
 
startPosition() - Method in exception org.apache.spark.sql.AnalysisException
 
startServiceOnPort(int, Function1<Object, Tuple2<T, Object>>, SparkConf, String) - Static method in class org.apache.spark.util.Utils
Attempt to start a service on the given port, or fail after a number of attempts.
startStream(String) - Method in class org.apache.spark.sql.DataFrameWriter
:: Experimental :: Starts the execution of the streaming query, which will continually output results to the given path as new data arrives.
startStream() - Method in class org.apache.spark.sql.DataFrameWriter
:: Experimental :: Starts the execution of the streaming query, which will continually output results to the given path as new data arrives.
startsWith(Column) - Method in class org.apache.spark.sql.Column
String starts with.
startsWith(String) - Method in class org.apache.spark.sql.Column
String starts with another string literal.
startsWith(GenSeq<B>) - Static method in class org.apache.spark.sql.types.StructType
 
startsWith(GenSeq<B>, int) - Static method in class org.apache.spark.sql.types.StructType
 
startTime() - Method in class org.apache.spark.api.java.JavaSparkContext
 
startTime() - Method in class org.apache.spark.SparkContext
 
startTime() - Method in class org.apache.spark.status.api.v1.ApplicationAttemptInfo
 
startTime() - Method in class org.apache.spark.streaming.scheduler.OutputOperationInfo
 
startTime() - Method in class org.apache.spark.ui.jobs.JobProgressListener
 
startTime() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorUIData
 
stat() - Method in class org.apache.spark.sql.Dataset
Returns a DataFrameStatFunctions for working with statistic functions.
StatCounter - Class in org.apache.spark.util
A class for tracking the statistics of a set of numbers (count, mean and variance) in a numerically robust way.
StatCounter(TraversableOnce<Object>) - Constructor for class org.apache.spark.util.StatCounter
 
StatCounter() - Constructor for class org.apache.spark.util.StatCounter
Initialize the StatCounter with no values.
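For example (assuming a SparkContext sc), stats() on an RDD of doubles fills a StatCounter in a single pass:

    val nums = sc.parallelize(Seq(1.0, 2.0, 3.0, 4.0))
    val s = nums.stats()
    println(s"count=${s.count} mean=${s.mean} stdev=${s.stdev}")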
state() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StatusUpdate
 
state() - Method in class org.apache.spark.scheduler.local.StatusUpdate
 
State<S> - Class in org.apache.spark.streaming
:: Experimental :: Abstract class for getting and updating the state in mapping function used in the mapWithState operation of a pair DStream (Scala) or a JavaPairDStream (Java).
State() - Constructor for class org.apache.spark.streaming.State
 
stateChanged(SparkAppHandle) - Method in interface org.apache.spark.launcher.SparkAppHandle.Listener
Callback for changes in the handle's state.
statement() - Method in class org.apache.spark.ml.feature.SQLTransformer
SQL statement parameter.
statePrefix() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
stateSnapshots() - Method in class org.apache.spark.streaming.api.java.JavaMapWithStateDStream
 
stateSnapshots() - Method in class org.apache.spark.streaming.dstream.MapWithStateDStream
Return a pair DStream where each RDD is the snapshot of the state of all the keys.
StateSpec<KeyType,ValueType,StateType,MappedType> - Class in org.apache.spark.streaming
:: Experimental :: Abstract class representing all the specifications of the DStream transformation mapWithState operation of a pair DStream (Scala) or a JavaPairDStream (Java).
StateSpec() - Constructor for class org.apache.spark.streaming.StateSpec
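As a sketch of a running count per key (assuming a DStream[(String, Int)] named pairDStream and a StreamingContext with checkpointing enabled):

    import org.apache.spark.streaming.{State, StateSpec}

    def runningCount(key: String, value: Option[Int], state: State[Int]): (String, Int) = {
      val sum = value.getOrElse(0) + state.getOption.getOrElse(0)
      state.update(sum)     // the updated state is carried to the next batch
      (key, sum)
    }
    val counts = pairDStream.mapWithState(StateSpec.function(runningCount _))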
 
staticPageRank(int, double) - Method in class org.apache.spark.graphx.GraphOps
Run PageRank for a fixed number of iterations returning a graph with vertex attributes containing the PageRank and edge attributes the normalized edge weight.
staticPersonalizedPageRank(long, int, double) - Method in class org.apache.spark.graphx.GraphOps
Run Personalized PageRank for a fixed number of iterations, with all iterations originating at the source node, returning a graph with vertex attributes containing the PageRank and edge attributes the normalized edge weight.
statistic() - Method in class org.apache.spark.mllib.stat.test.ChiSqTestResult
 
statistic() - Method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTestResult
 
statistic() - Method in interface org.apache.spark.mllib.stat.test.TestResult
Test statistic.
Statistics - Class in org.apache.spark.mllib.stat
API for statistical functions in MLlib.
Statistics() - Constructor for class org.apache.spark.mllib.stat.Statistics
 
stats() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return a StatCounter object that captures the mean, variance and count of the RDD's elements in one operation.
stats() - Method in class org.apache.spark.mllib.tree.model.Node
 
stats() - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Return a StatCounter object that captures the mean, variance and count of the RDD's elements in one operation.
StatsReportListener - Class in org.apache.spark.scheduler
:: DeveloperApi :: Simple SparkListener that logs a few summary statistics when each stage completes.
StatsReportListener() - Constructor for class org.apache.spark.scheduler.StatsReportListener
 
StatsReportListener - Class in org.apache.spark.streaming.scheduler
:: DeveloperApi :: A simple StreamingListener that logs summary statistics across Spark Streaming batches. param: numBatchInfos Number of last batches to consider for generating statistics (default: 10).
StatsReportListener(int) - Constructor for class org.apache.spark.streaming.scheduler.StatsReportListener
 
status() - Method in class org.apache.spark.scheduler.TaskInfo
 
status() - Method in interface org.apache.spark.SparkJobInfo
 
status() - Method in class org.apache.spark.SparkJobInfoImpl
 
status() - Method in class org.apache.spark.status.api.v1.JobData
 
status() - Method in class org.apache.spark.status.api.v1.StageData
 
status() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
statusTracker() - Method in class org.apache.spark.api.java.JavaSparkContext
 
statusTracker() - Method in class org.apache.spark.SparkContext
 
StatusUpdate - Class in org.apache.spark.scheduler.local
 
StatusUpdate(long, Enumeration.Value, ByteBuffer) - Constructor for class org.apache.spark.scheduler.local.StatusUpdate
 
STD() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
std() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
std() - Method in class org.apache.spark.ml.feature.StandardScalerModel
 
std() - Method in class org.apache.spark.mllib.feature.StandardScalerModel
 
std() - Method in class org.apache.spark.mllib.random.LogNormalGenerator
 
stddev(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: alias for stddev_samp.
stddev(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: alias for stddev_samp.
stddev_pop(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the population standard deviation of the expression in a group.
stddev_pop(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the population standard deviation of the expression in a group.
stddev_samp(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the sample standard deviation of the expression in a group.
stddev_samp(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the sample standard deviation of the expression in a group.
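For example (assuming a DataFrame df with a numeric column score):

    import org.apache.spark.sql.functions.{stddev_pop, stddev_samp}

    val deviations = df.agg(
      stddev_samp("score").as("sample_sd"),
      stddev_pop("score").as("population_sd"))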
stdev() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Compute the standard deviation of this RDD's elements.
stdev() - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Compute the standard deviation of this RDD's elements.
stdev() - Method in class org.apache.spark.util.StatCounter
Return the standard deviation of the values.
stepSize() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
stepSize() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
stepSize() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
stepSize() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
stepSize() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
stepSize() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
stepSize() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
stop() - Method in class org.apache.spark.api.java.JavaSparkContext
Shut down the SparkContext.
stop() - Method in interface org.apache.spark.launcher.SparkAppHandle
Asks the application to stop.
stop() - Method in class org.apache.spark.SparkContext
 
stop() - Method in interface org.apache.spark.sql.ContinuousQuery
Stops the execution of this query if it is running.
stop() - Method in class org.apache.spark.sql.SparkSession
Stop the underlying SparkContext.
stop() - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Stop the execution of the streams.
stop(boolean) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Stop the execution of the streams.
stop(boolean, boolean) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Stop the execution of the streams.
stop() - Method in class org.apache.spark.streaming.dstream.ConstantInputDStream
 
stop() - Method in class org.apache.spark.streaming.dstream.InputDStream
Method called to stop receiving data.
stop() - Method in class org.apache.spark.streaming.dstream.ReceiverInputDStream
 
stop(String) - Method in class org.apache.spark.streaming.receiver.Receiver
Stop the receiver completely.
stop(String, Throwable) - Method in class org.apache.spark.streaming.receiver.Receiver
Stop the receiver completely due to an exception
stop(boolean) - Method in class org.apache.spark.streaming.StreamingContext
Stop the execution of the streams immediately (does not wait for all received data to be processed).
stop(boolean, boolean) - Method in class org.apache.spark.streaming.StreamingContext
Stop the execution of the streams, with option of ensuring all received data has been processed.
StopAllReceivers - Class in org.apache.spark.streaming.scheduler
This message will trigger ReceiverTrackerEndpoint to send stop signals to all registered receivers.
StopAllReceivers() - Constructor for class org.apache.spark.streaming.scheduler.StopAllReceivers
 
StopCoordinator - Class in org.apache.spark.scheduler
 
StopCoordinator() - Constructor for class org.apache.spark.scheduler.StopCoordinator
 
StopExecutor - Class in org.apache.spark.scheduler.local
 
StopExecutor() - Constructor for class org.apache.spark.scheduler.local.StopExecutor
 
StopMapOutputTracker - Class in org.apache.spark
 
StopMapOutputTracker() - Constructor for class org.apache.spark.StopMapOutputTracker
 
StopReceiver - Class in org.apache.spark.streaming.receiver
 
StopReceiver() - Constructor for class org.apache.spark.streaming.receiver.StopReceiver
 
stopWords() - Method in class org.apache.spark.ml.feature.StopWordsRemover
The words to be filtered out.
StopWordsRemover - Class in org.apache.spark.ml.feature
:: Experimental :: A feature transformer that filters out stop words from input.
StopWordsRemover(String) - Constructor for class org.apache.spark.ml.feature.StopWordsRemover
 
StopWordsRemover() - Constructor for class org.apache.spark.ml.feature.StopWordsRemover
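For illustration (assuming a DataFrame tokens with an array-of-strings column named raw):

    import org.apache.spark.ml.feature.StopWordsRemover

    val remover = new StopWordsRemover()
      .setInputCol("raw")
      .setOutputCol("filtered")
    val cleaned = remover.transform(tokens)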
 
STORAGE_MEMORY() - Static method in class org.apache.spark.ui.ToolTips
 
storageLevel() - Method in class org.apache.spark.status.api.v1.RDDPartitionInfo
 
storageLevel() - Method in class org.apache.spark.status.api.v1.RDDStorageInfo
 
storageLevel() - Method in class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
 
storageLevel() - Method in class org.apache.spark.storage.BlockStatus
 
storageLevel() - Method in class org.apache.spark.storage.BlockUpdatedInfo
 
storageLevel() - Method in class org.apache.spark.storage.RDDInfo
 
StorageLevel - Class in org.apache.spark.storage
:: DeveloperApi :: Flags for controlling the storage of an RDD.
StorageLevel() - Constructor for class org.apache.spark.storage.StorageLevel
 
storageLevel() - Method in class org.apache.spark.streaming.receiver.Receiver
 
storageLevelFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
StorageLevels - Class in org.apache.spark.api.java
Expose some commonly useful storage level constants.
StorageLevels() - Constructor for class org.apache.spark.api.java.StorageLevels
 
storageLevelToJson(StorageLevel) - Static method in class org.apache.spark.util.JsonProtocol
 
StorageListener - Class in org.apache.spark.ui.storage
:: DeveloperApi :: A SparkListener that prepares information to be displayed on the BlockManagerUI.
StorageListener(StorageStatusListener) - Constructor for class org.apache.spark.ui.storage.StorageListener
 
StorageStatus - Class in org.apache.spark.storage
:: DeveloperApi :: Storage information for each BlockManager.
StorageStatus(BlockManagerId, long) - Constructor for class org.apache.spark.storage.StorageStatus
 
StorageStatus(BlockManagerId, long, Map<BlockId, BlockStatus>) - Constructor for class org.apache.spark.storage.StorageStatus
Create a storage status with an initial set of blocks, leaving the source unmodified.
storageStatusList() - Method in class org.apache.spark.storage.StorageStatusListener
 
StorageStatusListener - Class in org.apache.spark.storage
:: DeveloperApi :: A SparkListener that maintains executor storage status.
StorageStatusListener(SparkConf) - Constructor for class org.apache.spark.storage.StorageStatusListener
 
StorageUtils - Class in org.apache.spark.storage
Helper methods for storage-related objects.
StorageUtils() - Constructor for class org.apache.spark.storage.StorageUtils
 
store(T) - Method in class org.apache.spark.streaming.receiver.Receiver
Store a single item of received data to Spark's memory.
store(ArrayBuffer<T>) - Method in class org.apache.spark.streaming.receiver.Receiver
Store an ArrayBuffer of received data as a data block into Spark's memory.
store(ArrayBuffer<T>, Object) - Method in class org.apache.spark.streaming.receiver.Receiver
Store an ArrayBuffer of received data as a data block into Spark's memory.
store(Iterator<T>) - Method in class org.apache.spark.streaming.receiver.Receiver
Store an iterator of received data as a data block into Spark's memory.
store(Iterator<T>, Object) - Method in class org.apache.spark.streaming.receiver.Receiver
Store an iterator of received data as a data block into Spark's memory.
store(Iterator<T>) - Method in class org.apache.spark.streaming.receiver.Receiver
Store an iterator of received data as a data block into Spark's memory.
store(Iterator<T>, Object) - Method in class org.apache.spark.streaming.receiver.Receiver
Store an iterator of received data as a data block into Spark's memory.
store(ByteBuffer) - Method in class org.apache.spark.streaming.receiver.Receiver
Store the bytes of received data as a data block into Spark's memory.
store(ByteBuffer, Object) - Method in class org.apache.spark.streaming.receiver.Receiver
Store the bytes of received data as a data block into Spark's memory.
Strategy - Class in org.apache.spark.mllib.tree.configuration
Stores all the configuration options for tree construction. param: algo Learning goal.
Strategy(Enumeration.Value, Impurity, int, int, int, Enumeration.Value, Map<Object, Object>, int, double, int, double, boolean, int) - Constructor for class org.apache.spark.mllib.tree.configuration.Strategy
 
Strategy(Enumeration.Value, Impurity, int, int, int, Map<Integer, Integer>) - Constructor for class org.apache.spark.mllib.tree.configuration.Strategy
Java-friendly constructor for Strategy
StratifiedSamplingUtils - Class in org.apache.spark.util.random
Auxiliary functions and data structures for the sampleByKey method in PairRDDFunctions.
StratifiedSamplingUtils() - Constructor for class org.apache.spark.util.random.StratifiedSamplingUtils
 
stream() - Method in class org.apache.spark.sql.DataFrameReader
:: Experimental :: Loads input data stream in as a DataFrame, for data streams that don't require a path (e.g. external key-value stores).
stream(String) - Method in class org.apache.spark.sql.DataFrameReader
:: Experimental :: Loads input in as a DataFrame, for data streams that read from some path.
STREAM() - Static method in class org.apache.spark.storage.BlockId
 
StreamBlockId - Class in org.apache.spark.storage
 
StreamBlockId(int, long) - Constructor for class org.apache.spark.storage.StreamBlockId
 
streamId() - Method in class org.apache.spark.storage.StreamBlockId
 
streamId() - Method in class org.apache.spark.streaming.receiver.Receiver
Get the unique identifier of the receiver input stream that this receiver is associated with.
streamId() - Method in class org.apache.spark.streaming.scheduler.ReceiverInfo
 
streamIdToInputInfo() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
 
StreamingContext - Class in org.apache.spark.streaming
Main entry point for Spark Streaming functionality.
StreamingContext(SparkContext, Duration) - Constructor for class org.apache.spark.streaming.StreamingContext
Create a StreamingContext using an existing SparkContext.
StreamingContext(SparkConf, Duration) - Constructor for class org.apache.spark.streaming.StreamingContext
Create a StreamingContext by providing the configuration necessary for a new SparkContext.
StreamingContext(String, String, Duration, String, Seq<String>, Map<String, String>) - Constructor for class org.apache.spark.streaming.StreamingContext
Create a StreamingContext by providing the details necessary for creating a new SparkContext.
StreamingContext(String, Configuration) - Constructor for class org.apache.spark.streaming.StreamingContext
Recreate a StreamingContext from a checkpoint file.
StreamingContext(String) - Constructor for class org.apache.spark.streaming.StreamingContext
Recreate a StreamingContext from a checkpoint file.
StreamingContext(String, SparkContext) - Constructor for class org.apache.spark.streaming.StreamingContext
Recreate a StreamingContext from a checkpoint file using an existing SparkContext.
StreamingContextPythonHelper - Class in org.apache.spark.streaming
 
StreamingContextPythonHelper() - Constructor for class org.apache.spark.streaming.StreamingContextPythonHelper
 
StreamingContextState - Enum in org.apache.spark.streaming
:: DeveloperApi :: Represents the state of a StreamingContext.
StreamingKMeans - Class in org.apache.spark.mllib.clustering
StreamingKMeans provides methods for configuring a streaming k-means analysis, training the model on streaming data, and using the model to make predictions on streaming data.
StreamingKMeans(int, double, String) - Constructor for class org.apache.spark.mllib.clustering.StreamingKMeans
 
StreamingKMeans() - Constructor for class org.apache.spark.mllib.clustering.StreamingKMeans
 
StreamingKMeansModel - Class in org.apache.spark.mllib.clustering
StreamingKMeansModel extends MLlib's KMeansModel for streaming algorithms, so it can keep track of a continuously updated weight associated with each cluster, and also update the model by doing a single iteration of the standard k-means algorithm.
StreamingKMeansModel(Vector[], double[]) - Constructor for class org.apache.spark.mllib.clustering.StreamingKMeansModel
 
StreamingLinearAlgorithm<M extends GeneralizedLinearModel,A extends GeneralizedLinearAlgorithm<M>> - Class in org.apache.spark.mllib.regression
:: DeveloperApi :: StreamingLinearAlgorithm implements methods for continuously training a generalized linear model on streaming data, and using it for prediction on (possibly different) streaming data.
StreamingLinearAlgorithm() - Constructor for class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
 
StreamingLinearRegressionWithSGD - Class in org.apache.spark.mllib.regression
Train or predict a linear regression model on streaming data.
StreamingLinearRegressionWithSGD() - Constructor for class org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD
Construct a StreamingLinearRegression object with default parameters: {stepSize: 0.1, numIterations: 50, miniBatchFraction: 1.0}.
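A hedged Scala sketch of how the streaming regressor is typically configured and wired to DStreams; trainingStream and testStream are assumed DStream[LabeledPoint]s created elsewhere:

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.regression.StreamingLinearRegressionWithSGD

    val model = new StreamingLinearRegressionWithSGD()
      .setInitialWeights(Vectors.zeros(3))   // must match the feature dimension
      .setStepSize(0.1)
      .setNumIterations(50)

    model.trainOn(trainingStream)
    model.predictOnValues(testStream.map(lp => (lp.label, lp.features))).print()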
StreamingListener - Interface in org.apache.spark.streaming.scheduler
:: DeveloperApi :: A listener interface for receiving information about an ongoing streaming computation.
StreamingListenerBatchCompleted - Class in org.apache.spark.streaming.scheduler
 
StreamingListenerBatchCompleted(BatchInfo) - Constructor for class org.apache.spark.streaming.scheduler.StreamingListenerBatchCompleted
 
StreamingListenerBatchStarted - Class in org.apache.spark.streaming.scheduler
 
StreamingListenerBatchStarted(BatchInfo) - Constructor for class org.apache.spark.streaming.scheduler.StreamingListenerBatchStarted
 
StreamingListenerBatchSubmitted - Class in org.apache.spark.streaming.scheduler
 
StreamingListenerBatchSubmitted(BatchInfo) - Constructor for class org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
 
StreamingListenerEvent - Interface in org.apache.spark.streaming.scheduler
:: DeveloperApi :: Base trait for events related to StreamingListener
StreamingListenerOutputOperationCompleted - Class in org.apache.spark.streaming.scheduler
 
StreamingListenerOutputOperationCompleted(OutputOperationInfo) - Constructor for class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationCompleted
 
StreamingListenerOutputOperationStarted - Class in org.apache.spark.streaming.scheduler
 
StreamingListenerOutputOperationStarted(OutputOperationInfo) - Constructor for class org.apache.spark.streaming.scheduler.StreamingListenerOutputOperationStarted
 
StreamingListenerReceiverError - Class in org.apache.spark.streaming.scheduler
 
StreamingListenerReceiverError(ReceiverInfo) - Constructor for class org.apache.spark.streaming.scheduler.StreamingListenerReceiverError
 
StreamingListenerReceiverStarted - Class in org.apache.spark.streaming.scheduler
 
StreamingListenerReceiverStarted(ReceiverInfo) - Constructor for class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStarted
 
StreamingListenerReceiverStopped - Class in org.apache.spark.streaming.scheduler
 
StreamingListenerReceiverStopped(ReceiverInfo) - Constructor for class org.apache.spark.streaming.scheduler.StreamingListenerReceiverStopped
 
StreamingLogisticRegressionWithSGD - Class in org.apache.spark.mllib.classification
Train or predict a logistic regression model on streaming data.
StreamingLogisticRegressionWithSGD() - Constructor for class org.apache.spark.mllib.classification.StreamingLogisticRegressionWithSGD
Construct a StreamingLogisticRegression object with default parameters: {stepSize: 0.1, numIterations: 50, miniBatchFraction: 1.0, regParam: 0.0}.
StreamingTest - Class in org.apache.spark.mllib.stat.test
:: Experimental :: Performs online 2-sample significance testing for a stream of (Boolean, Double) pairs.
StreamingTest() - Constructor for class org.apache.spark.mllib.stat.test.StreamingTest
 
StreamInputInfo - Class in org.apache.spark.streaming.scheduler
:: DeveloperApi :: Tracks the information of an input stream at a specified batch time.
StreamInputInfo(int, long, Map<String, Object>) - Constructor for class org.apache.spark.streaming.scheduler.StreamInputInfo
 
streams() - Method in class org.apache.spark.sql.SparkSession
Returns a ContinuousQueryManager that allows managing all the ContinuousQueries active on this.
streams() - Method in class org.apache.spark.sql.SQLContext
Returns a ContinuousQueryManager that allows managing all the ContinuousQueries active on this context.
StreamSinkProvider - Interface in org.apache.spark.sql.sources
Implemented by objects that can produce a streaming Sink for a specific format or system.
StreamSourceProvider - Interface in org.apache.spark.sql.sources
Implemented by objects that can produce a streaming Source for a specific format or system.
STRING() - Static method in class org.apache.spark.api.r.SerializationFormats
 
string() - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type string.
STRING() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable string type.
stringArgs() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
StringArrayParam - Class in org.apache.spark.ml.param
:: DeveloperApi :: Specialized version of Param[Array[String]] for Java.
StringArrayParam(Params, String, String, Function1<String[], Object>) - Constructor for class org.apache.spark.ml.param.StringArrayParam
 
StringArrayParam(Params, String, String) - Constructor for class org.apache.spark.ml.param.StringArrayParam
 
StringContains - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to a string that contains the string value.
StringContains(String, String) - Constructor for class org.apache.spark.sql.sources.StringContains
 
StringEndsWith - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to a string that ends with value.
StringEndsWith(String, String) - Constructor for class org.apache.spark.sql.sources.StringEndsWith
 
StringIndexer - Class in org.apache.spark.ml.feature
:: Experimental :: A label indexer that maps a string column of labels to an ML column of label indices.
StringIndexer(String) - Constructor for class org.apache.spark.ml.feature.StringIndexer
 
StringIndexer() - Constructor for class org.apache.spark.ml.feature.StringIndexer
 
StringIndexerModel - Class in org.apache.spark.ml.feature
:: Experimental :: Model fitted by StringIndexer.
StringIndexerModel(String, String[]) - Constructor for class org.apache.spark.ml.feature.StringIndexerModel
 
StringIndexerModel(String[]) - Constructor for class org.apache.spark.ml.feature.StringIndexerModel
 
stringPrefix() - Static method in class org.apache.spark.sql.types.StructType
 
StringRRDD<T> - Class in org.apache.spark.api.r
An RDD that stores R objects as Array[String].
StringRRDD(RDD<T>, byte[], String, byte[], Object[], ClassTag<T>) - Constructor for class org.apache.spark.api.r.StringRRDD
 
StringStartsWith - Class in org.apache.spark.sql.sources
A filter that evaluates to true iff the attribute evaluates to a string that starts with value.
StringStartsWith(String, String) - Constructor for class org.apache.spark.sql.sources.StringStartsWith
 
stringToSeq(String, Function1<String, T>) - Static method in class org.apache.spark.internal.config.ConfigHelpers
 
StringType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the StringType object.
StringType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing String values.
stripDirectory(String) - Static method in class org.apache.spark.util.Utils
Strip the directory from a path name
stronglyConnectedComponents(int) - Method in class org.apache.spark.graphx.GraphOps
Compute the strongly connected component (SCC) of each vertex and return a graph with the vertex value containing the lowest vertex id in the SCC containing that vertex.
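A minimal GraphX sketch of the call, assuming an existing SparkContext sc; the toy edge list is made up:

    import org.apache.spark.graphx.{Edge, Graph}

    val edges = sc.parallelize(Seq(Edge(1L, 2L, 1), Edge(2L, 3L, 1), Edge(3L, 1L, 1), Edge(3L, 4L, 1)))
    val graph = Graph.fromEdges(edges, defaultValue = 0)
    val scc   = graph.stronglyConnectedComponents(numIter = 5)
    scc.vertices.collect().foreach { case (id, sccId) => println(s"$id -> $sccId") }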
StronglyConnectedComponents - Class in org.apache.spark.graphx.lib
Strongly connected components algorithm implementation.
StronglyConnectedComponents() - Constructor for class org.apache.spark.graphx.lib.StronglyConnectedComponents
 
struct(Seq<StructField>) - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type struct.
struct(StructType) - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type struct.
struct(Column...) - Static method in class org.apache.spark.sql.functions
Creates a new struct column.
struct(String, String...) - Static method in class org.apache.spark.sql.functions
Creates a new struct column that composes multiple input columns.
struct(Seq<Column>) - Static method in class org.apache.spark.sql.functions
Creates a new struct column.
struct(String, Seq<String>) - Static method in class org.apache.spark.sql.functions
Creates a new struct column that composes multiple input columns.
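A small DataFrame sketch of composing columns into a struct column, assuming a SparkSession named spark; the sample data is made up:

    import org.apache.spark.sql.functions.struct

    val people = spark.createDataFrame(Seq(("alice", 29), ("bob", 31))).toDF("name", "age")
    val nested = people.select(struct(people("name"), people("age")).as("person"))
    nested.printSchema()   // the schema now has a single struct column named "person"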
StructField - Class in org.apache.spark.sql.types
A field inside a StructType.
StructField(String, DataType, boolean, Metadata) - Constructor for class org.apache.spark.sql.types.StructField
 
StructField() - Constructor for class org.apache.spark.sql.types.StructField
No-arg constructor for kryo.
StructType - Class in org.apache.spark.sql.types
:: DeveloperApi :: A StructType object can be constructed by
StructType(StructField[]) - Constructor for class org.apache.spark.sql.types.StructType
 
StructType() - Constructor for class org.apache.spark.sql.types.StructType
No-arg constructor for kryo.
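A brief sketch of building a schema programmatically from StructFields, assuming a SparkSession named spark; the JSON path is a placeholder:

    import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

    val schema = StructType(Array(
      StructField("name", StringType, nullable = true),
      StructField("age",  IntegerType, nullable = false)))
    val people = spark.read.schema(schema).json("/path/to/people.json")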
StudentTTest - Class in org.apache.spark.mllib.stat.test
Performs Student's 2-sample t-test.
StudentTTest() - Constructor for class org.apache.spark.mllib.stat.test.StudentTTest
 
subexpressionEliminationEnabled() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
subgraph(Function1<EdgeTriplet<VD, ED>, Object>, Function2<Object, VD, Object>) - Method in class org.apache.spark.graphx.Graph
Restricts the graph to only the vertices and edges satisfying the predicates.
subgraph(Function1<EdgeTriplet<VD, ED>, Object>, Function2<Object, VD, Object>) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
subgraph$default$1() - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
subgraph$default$2() - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
submissionTime() - Method in class org.apache.spark.scheduler.StageInfo
When this stage was submitted from the DAGScheduler to a TaskScheduler.
submissionTime() - Method in interface org.apache.spark.SparkStageInfo
 
submissionTime() - Method in class org.apache.spark.SparkStageInfoImpl
 
submissionTime() - Method in class org.apache.spark.status.api.v1.JobData
 
submissionTime() - Method in class org.apache.spark.status.api.v1.StageData
 
submissionTime() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
 
submissionTime() - Method in class org.apache.spark.ui.jobs.UIData.JobUIData
 
submitJob(RDD<T>, Function1<Iterator<T>, U>, Seq<Object>, Function2<Object, U, BoxedUnit>, Function0<R>) - Method in interface org.apache.spark.JobSubmitter
Submit a job for execution and return a FutureAction holding the result.
submitJob(RDD<T>, Function1<Iterator<T>, U>, Seq<Object>, Function2<Object, U, BoxedUnit>, Function0<R>) - Method in class org.apache.spark.SparkContext
Submit a job for execution and return a FutureAction holding the result.
subqueries() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
subsamplingRate() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
subsamplingRate() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
subsamplingRate() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
subsamplingRate() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
subsamplingRate() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
subsamplingRate() - Static method in class org.apache.spark.ml.clustering.LDA
 
subsamplingRate() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
subsamplingRate() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
subsamplingRate() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
subsamplingRate() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
subsamplingRate() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
subsamplingRate() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
subsetAccuracy() - Method in class org.apache.spark.mllib.evaluation.MultilabelMetrics
Returns subset accuracy (for equal sets of labels)
substitute(String) - Method in class org.apache.spark.sql.internal.VariableSubstitution
Given a query, performs variable substitution and returns the result.
substr(Column, Column) - Method in class org.apache.spark.sql.Column
An expression that returns a substring.
substr(int, int) - Method in class org.apache.spark.sql.Column
An expression that returns a substring.
substring(Column, int, int) - Static method in class org.apache.spark.sql.functions
Returns the substring that starts at pos and is of length len when str is of String type, or the slice of the byte array that starts at pos and is of length len when str is of Binary type.
substring_index(Column, String, int) - Static method in class org.apache.spark.sql.functions
Returns the substring from string str before count occurrences of the delimiter delim.
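A short sketch of both string functions, assuming a SparkSession named spark; the host value is made up:

    import org.apache.spark.sql.functions.{substring, substring_index}

    val hosts = spark.createDataFrame(Seq(Tuple1("spark.apache.org"))).toDF("host")
    hosts.select(
      substring(hosts("host"), 1, 5).as("prefix"),          // "spark"
      substring_index(hosts("host"), ".", 2).as("domain")   // "spark.apache"
    ).show()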
subtract(JavaDoubleRDD) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return an RDD with the elements from this that are not in other.
subtract(JavaDoubleRDD, int) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return an RDD with the elements from this that are not in other.
subtract(JavaDoubleRDD, Partitioner) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return an RDD with the elements from this that are not in other.
subtract(JavaPairRDD<K, V>) - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD with the elements from this that are not in other.
subtract(JavaPairRDD<K, V>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD with the elements from this that are not in other.
subtract(JavaPairRDD<K, V>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD with the elements from this that are not in other.
subtract(JavaRDD<T>) - Method in class org.apache.spark.api.java.JavaRDD
Return an RDD with the elements from this that are not in other.
subtract(JavaRDD<T>, int) - Method in class org.apache.spark.api.java.JavaRDD
Return an RDD with the elements from this that are not in other.
subtract(JavaRDD<T>, Partitioner) - Method in class org.apache.spark.api.java.JavaRDD
Return an RDD with the elements from this that are not in other.
subtract(RDD<T>) - Static method in class org.apache.spark.api.r.RRDD
 
subtract(RDD<T>, int) - Static method in class org.apache.spark.api.r.RRDD
 
subtract(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
subtract(RDD<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
subtract(RDD<T>, int) - Static method in class org.apache.spark.graphx.EdgeRDD
 
subtract(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
subtract(RDD<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
subtract(RDD<T>, int) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
subtract(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
subtract(RDD<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
subtract(RDD<T>, int) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
subtract(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
subtract(RDD<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
subtract(RDD<T>, int) - Static method in class org.apache.spark.graphx.VertexRDD
 
subtract(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
subtract(BlockMatrix) - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Subtracts the given block matrix other from this block matrix: this - other.
subtract(RDD<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
subtract(RDD<T>, int) - Static method in class org.apache.spark.rdd.HadoopRDD
 
subtract(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
subtract(RDD<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
subtract(RDD<T>, int) - Static method in class org.apache.spark.rdd.JdbcRDD
 
subtract(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
subtract(RDD<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
subtract(RDD<T>, int) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
subtract(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
subtract(RDD<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
subtract(RDD<T>, int) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
subtract(RDD<T>, Partitioner, Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
subtract(RDD<T>) - Method in class org.apache.spark.rdd.RDD
Return an RDD with the elements from this that are not in other.
subtract(RDD<T>, int) - Method in class org.apache.spark.rdd.RDD
Return an RDD with the elements from this that are not in other.
subtract(RDD<T>, Partitioner, Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Return an RDD with the elements from this that are not in other.
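A minimal sketch of RDD.subtract, assuming an existing SparkContext sc:

    val a = sc.parallelize(1 to 10)
    val b = sc.parallelize(Seq(2, 4, 6, 8, 10))
    a.subtract(b).collect()            // Array(1, 3, 5, 7, 9), element order not guaranteed
    a.subtract(b, numPartitions = 4)   // same elements, repartitioned into 4 partitions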
subtract(long, long) - Static method in class org.apache.spark.streaming.util.RawTextHelper
 
subtract$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.api.r.RRDD
 
subtract$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.graphx.EdgeRDD
 
subtract$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
subtract$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
subtract$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.graphx.VertexRDD
 
subtract$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.rdd.HadoopRDD
 
subtract$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.rdd.JdbcRDD
 
subtract$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
subtract$default$3(RDD<T>, Partitioner) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
subtractByKey(JavaPairRDD<K, W>) - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD with the pairs from this whose keys are not in other.
subtractByKey(JavaPairRDD<K, W>, int) - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD with the pairs from `this` whose keys are not in `other`.
subtractByKey(JavaPairRDD<K, W>, Partitioner) - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD with the pairs from `this` whose keys are not in `other`.
subtractByKey(RDD<Tuple2<K, W>>, ClassTag<W>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return an RDD with the pairs from this whose keys are not in other.
subtractByKey(RDD<Tuple2<K, W>>, int, ClassTag<W>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return an RDD with the pairs from `this` whose keys are not in `other`.
subtractByKey(RDD<Tuple2<K, W>>, Partitioner, ClassTag<W>) - Method in class org.apache.spark.rdd.PairRDDFunctions
Return an RDD with the pairs from `this` whose keys are not in `other`.
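A minimal sketch of subtractByKey, assuming an existing SparkContext sc; the pair data is made up:

    val prices   = sc.parallelize(Seq(("apple", 1.0), ("pear", 2.0), ("plum", 3.0)))
    val excluded = sc.parallelize(Seq(("pear", ()), ("plum", ())))
    prices.subtractByKey(excluded).collect()   // Array(("apple", 1.0))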
subtreeToString$default$1() - Static method in class org.apache.spark.ml.tree.InternalNode
 
succeededTasks() - Method in class org.apache.spark.status.api.v1.ExecutorStageSummary
 
succeededTasks() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
success(T) - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
Success - Class in org.apache.spark
:: DeveloperApi :: Task succeeded.
Success() - Constructor for class org.apache.spark.Success
 
successful() - Method in class org.apache.spark.scheduler.TaskInfo
 
sum() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Add up the elements in this RDD.
Sum() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
sum() - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Add up the elements in this RDD.
sum(MapFunction<T, Double>) - Static method in class org.apache.spark.sql.expressions.javalang.typed
Sum aggregate function for floating point (double) type.
sum(Function1<IN, Object>) - Static method in class org.apache.spark.sql.expressions.scalalang.typed
Sum aggregate function for floating point (double) type.
sum(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the sum of all values in the expression.
sum(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the sum of all values in the given column.
sum(String...) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the sum for each numeric column for each group.
sum(Seq<String>) - Method in class org.apache.spark.sql.RelationalGroupedDataset
Compute the sum for each numeric column for each group.
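A small sketch of per-group sums over named numeric columns, assuming a SparkSession named spark; the sample data is made up:

    val sales = spark.createDataFrame(Seq(
      ("east", 100, 10), ("east", 200, 20), ("west", 50, 5))).toDF("region", "revenue", "units")
    sales.groupBy("region").sum("revenue", "units").show()
    // result columns: region, sum(revenue), sum(units)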
sum(Numeric<B>) - Static method in class org.apache.spark.sql.types.StructType
 
sum() - Method in class org.apache.spark.util.DoubleAccumulator
Returns the sum of elements added to the accumulator.
sum() - Method in class org.apache.spark.util.LongAccumulator
Returns the sum of elements added to the accumulator.
sum() - Method in class org.apache.spark.util.StatCounter
 
sumApprox(long, Double) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Approximate operation to return the sum within a timeout.
sumApprox(long) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Approximate operation to return the sum within a timeout.
sumApprox(long, double) - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Approximate operation to return the sum within a timeout.
sumDistinct(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the sum of distinct values in the expression.
sumDistinct(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the sum of distinct values in the expression.
sumLong(MapFunction<T, Long>) - Static method in class org.apache.spark.sql.expressions.javalang.typed
Sum aggregate function for integral (long, i.e.
sumLong(Function1<IN, Object>) - Static method in class org.apache.spark.sql.expressions.scalalang.typed
Sum aggregate function for integral (long, i.e.
summary() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
Gets summary of model on training set.
summary() - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
Gets summary of model on training set.
summary() - Method in class org.apache.spark.ml.clustering.KMeansModel
Gets summary of model on training set.
summary() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
Gets R-like summary of model on training set.
summary() - Method in class org.apache.spark.ml.regression.LinearRegressionModel
Gets summary (e.g.
supportedFeatureSubsetStrategies() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
Accessor for supported featureSubsetStrategy settings: auto, all, onethird, sqrt, log2
supportedFeatureSubsetStrategies() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
Accessor for supported featureSubsetStrategy settings: auto, all, onethird, sqrt, log2
supportedFeatureSubsetStrategies() - Static method in class org.apache.spark.mllib.tree.RandomForest
List of supported feature subset sampling strategies.
supportedImpurities() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
Accessor for supported impurities: entropy, gini
supportedImpurities() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
Accessor for supported impurity settings: entropy, gini
supportedImpurities() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
Accessor for supported impurities: variance
supportedImpurities() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
Accessor for supported impurity settings: variance
supportedLossTypes() - Static method in class org.apache.spark.ml.classification.GBTClassifier
Accessor for supported loss settings: logistic
supportedLossTypes() - Static method in class org.apache.spark.ml.regression.GBTRegressor
Accessor for supported loss settings: squared (L2), absolute (L1)
supportedOptimizers() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
supportedOptimizers() - Static method in class org.apache.spark.ml.clustering.LDA
 
supportedOptimizers() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
SVDPlusPlus - Class in org.apache.spark.graphx.lib
Implementation of SVD++ algorithm.
SVDPlusPlus() - Constructor for class org.apache.spark.graphx.lib.SVDPlusPlus
 
SVDPlusPlus.Conf - Class in org.apache.spark.graphx.lib
Configuration parameters for SVDPlusPlus.
SVDPlusPlus.Conf(int, int, double, double, double, double, double, double) - Constructor for class org.apache.spark.graphx.lib.SVDPlusPlus.Conf
 
SVMDataGenerator - Class in org.apache.spark.mllib.util
:: DeveloperApi :: Generate sample data used for SVM.
SVMDataGenerator() - Constructor for class org.apache.spark.mllib.util.SVMDataGenerator
 
SVMModel - Class in org.apache.spark.mllib.classification
Model for Support Vector Machines (SVMs).
SVMModel(Vector, double) - Constructor for class org.apache.spark.mllib.classification.SVMModel
 
SVMWithSGD - Class in org.apache.spark.mllib.classification
Train a Support Vector Machine (SVM) using Stochastic Gradient Descent.
SVMWithSGD() - Constructor for class org.apache.spark.mllib.classification.SVMWithSGD
Construct an SVM object with default parameters: {stepSize: 1.0, numIterations: 100, regParam: 0.01, miniBatchFraction: 1.0}.
symbolToColumn(Symbol) - Method in class org.apache.spark.sql.SQLImplicits
An implicit conversion that turns a Scala Symbol into a Column.
symlink(File, File) - Static method in class org.apache.spark.util.Utils
Creates a symlink.
symmetricEigs(Function1<DenseVector<Object>, DenseVector<Object>>, int, int, double, int) - Static method in class org.apache.spark.mllib.linalg.EigenValueDecomposition
Compute the leading k eigenvalues and eigenvectors on a symmetric square matrix using ARPACK.
syr(double, Vector, DenseMatrix) - Static method in class org.apache.spark.ml.linalg.BLAS
A := alpha * x * x^T + A
syr(double, Vector, DenseMatrix) - Static method in class org.apache.spark.mllib.linalg.BLAS
A := alpha * x * x^T + A
SYSTEM_DEFAULT() - Static method in class org.apache.spark.sql.types.DecimalType
 
systemProperties() - Method in class org.apache.spark.ui.env.EnvironmentListener
 

T

t() - Method in class org.apache.spark.SerializableWritable
 
Table - Class in org.apache.spark.sql.catalog
 
Table(String, String, String, String, boolean) - Constructor for class org.apache.spark.sql.catalog.Table
 
table(String) - Method in class org.apache.spark.sql.DataFrameReader
Returns the specified table as a DataFrame.
table() - Method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
table(String) - Method in class org.apache.spark.sql.SparkSession
Returns the specified table as a DataFrame.
table(TableIdentifier) - Method in class org.apache.spark.sql.SparkSession
 
table(String) - Method in class org.apache.spark.sql.SQLContext
Returns the specified table as a DataFrame.
TABLE_CLASS_NOT_STRIPED() - Static method in class org.apache.spark.ui.UIUtils
 
TABLE_CLASS_STRIPED() - Static method in class org.apache.spark.ui.UIUtils
 
TABLE_CLASS_STRIPED_SORTABLE() - Static method in class org.apache.spark.ui.UIUtils
 
tableNames() - Method in class org.apache.spark.sql.SQLContext
Returns the names of tables in the current database as an array.
tableNames(String) - Method in class org.apache.spark.sql.SQLContext
Returns the names of tables in the given database as an array.
tables() - Method in class org.apache.spark.sql.SQLContext
Returns a DataFrame containing names of existing tables in the current database.
tables(String) - Method in class org.apache.spark.sql.SQLContext
Returns a DataFrame containing names of existing tables in the given database.
TableScan - Interface in org.apache.spark.sql.sources
:: DeveloperApi :: A BaseRelation that can produce all of its tuples as an RDD of Row objects.
tableType() - Method in class org.apache.spark.sql.catalog.Table
 
tail() - Static method in class org.apache.spark.sql.types.StructType
 
tails() - Static method in class org.apache.spark.sql.types.StructType
 
take(int) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
take(int) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
take(int) - Static method in class org.apache.spark.api.java.JavaRDD
 
take(int) - Method in interface org.apache.spark.api.java.JavaRDDLike
Take the first num elements of the RDD.
take(int) - Static method in class org.apache.spark.api.r.RRDD
 
take(int) - Static method in class org.apache.spark.graphx.EdgeRDD
 
take(int) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
take(int) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
take(int) - Static method in class org.apache.spark.graphx.VertexRDD
 
take(int) - Static method in class org.apache.spark.rdd.HadoopRDD
 
take(int) - Static method in class org.apache.spark.rdd.JdbcRDD
 
take(int) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
take(int) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
take(int) - Method in class org.apache.spark.rdd.RDD
Take the first num elements of the RDD.
take(int) - Method in class org.apache.spark.sql.Dataset
Returns the first n rows in the Dataset.
take(int) - Static method in class org.apache.spark.sql.types.StructType
 
takeAsList(int) - Method in class org.apache.spark.sql.Dataset
Returns the first n rows in the Dataset as a list.
takeAsync(int) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
takeAsync(int) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
takeAsync(int) - Static method in class org.apache.spark.api.java.JavaRDD
 
takeAsync(int) - Method in interface org.apache.spark.api.java.JavaRDDLike
The asynchronous version of the take action, which returns a future for retrieving the first num elements of this RDD.
takeAsync(int) - Method in class org.apache.spark.rdd.AsyncRDDActions
Returns a future for retrieving the first num elements of the RDD.
takeOrdered(int, Comparator<T>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
takeOrdered(int) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
takeOrdered(int, Comparator<T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
takeOrdered(int) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
takeOrdered(int, Comparator<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
takeOrdered(int) - Static method in class org.apache.spark.api.java.JavaRDD
 
takeOrdered(int, Comparator<T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Returns the first k (smallest) elements from this RDD as defined by the specified Comparator[T] and maintains the order.
takeOrdered(int) - Method in interface org.apache.spark.api.java.JavaRDDLike
Returns the first k (smallest) elements from this RDD using the natural ordering for T while maintaining the order.
takeOrdered(int, Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
takeOrdered(int, Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
takeOrdered(int, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
takeOrdered(int, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
takeOrdered(int, Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
takeOrdered(int, Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
takeOrdered(int, Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
takeOrdered(int, Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
takeOrdered(int, Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
takeOrdered(int, Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Returns the first k (smallest) elements from this RDD as defined by the specified implicit Ordering[T] and maintains the ordering.
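A minimal sketch of takeOrdered with the default and a reversed Ordering, assuming an existing SparkContext sc:

    val nums = sc.parallelize(Seq(7, 1, 9, 3, 5))
    nums.takeOrdered(2)                          // Array(1, 3): smallest first
    nums.takeOrdered(2)(Ordering[Int].reverse)   // Array(9, 7): largest first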
takeRight(int) - Static method in class org.apache.spark.sql.types.StructType
 
takeSample(boolean, int) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
takeSample(boolean, int) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
takeSample(boolean, int) - Static method in class org.apache.spark.api.java.JavaRDD
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.api.java.JavaRDD
 
takeSample(boolean, int) - Method in interface org.apache.spark.api.java.JavaRDDLike
 
takeSample(boolean, int, long) - Method in interface org.apache.spark.api.java.JavaRDDLike
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.api.r.RRDD
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.graphx.EdgeRDD
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.graphx.VertexRDD
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.rdd.HadoopRDD
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.rdd.JdbcRDD
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
takeSample(boolean, int, long) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
takeSample(boolean, int, long) - Method in class org.apache.spark.rdd.RDD
Return a fixed-size sampled subset of this RDD in an array
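A minimal sketch of takeSample, assuming an existing SparkContext sc; the seed is arbitrary:

    val data   = sc.parallelize(1 to 1000)
    val sample = data.takeSample(withReplacement = false, num = 10, seed = 42L)
    // sample is a local Array[Int] of exactly 10 elements, reproducible for a fixed seed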
takeSample$default$3() - Static method in class org.apache.spark.api.r.RRDD
 
takeSample$default$3() - Static method in class org.apache.spark.graphx.EdgeRDD
 
takeSample$default$3() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
takeSample$default$3() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
takeSample$default$3() - Static method in class org.apache.spark.graphx.VertexRDD
 
takeSample$default$3() - Static method in class org.apache.spark.rdd.HadoopRDD
 
takeSample$default$3() - Static method in class org.apache.spark.rdd.JdbcRDD
 
takeSample$default$3() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
takeSample$default$3() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
takeWhile(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
tallSkinnyQR(boolean) - Method in class org.apache.spark.mllib.linalg.distributed.RowMatrix
Compute QR decomposition for RowMatrix.
tan(Column) - Static method in class org.apache.spark.sql.functions
Computes the tangent of the given value.
tan(String) - Static method in class org.apache.spark.sql.functions
Computes the tangent of the given column.
tanh(Column) - Static method in class org.apache.spark.sql.functions
Computes the hyperbolic tangent of the given value.
tanh(String) - Static method in class org.apache.spark.sql.functions
Computes the hyperbolic tangent of the given column.
targetStorageLevel() - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
targetStorageLevel() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
task() - Method in class org.apache.spark.CleanupTaskWeakReference
 
TASK_DESERIALIZATION_TIME() - Static method in class org.apache.spark.ui.jobs.TaskDetailsClassNames
 
TASK_DESERIALIZATION_TIME() - Static method in class org.apache.spark.ui.ToolTips
 
TASK_TIME() - Static method in class org.apache.spark.ui.ToolTips
 
taskAttemptId() - Method in class org.apache.spark.TaskContext
An ID that is unique to this task attempt (within the same SparkContext, no two task attempts will share the same attempt ID).
TaskCommitDenied - Class in org.apache.spark
:: DeveloperApi :: Task requested the driver to commit, but was denied.
TaskCommitDenied(int, int, int) - Constructor for class org.apache.spark.TaskCommitDenied
 
TaskCompletionListener - Interface in org.apache.spark.util
:: DeveloperApi ::
TaskContext - Class in org.apache.spark
Contextual information about a task which can be read or mutated during execution.
TaskContext() - Constructor for class org.apache.spark.TaskContext
 
TaskData - Class in org.apache.spark.status.api.v1
 
taskData() - Method in class org.apache.spark.ui.jobs.UIData.StageUIData
 
TaskDetailsClassNames - Class in org.apache.spark.ui.jobs
Names of the CSS classes corresponding to each type of task detail.
TaskDetailsClassNames() - Constructor for class org.apache.spark.ui.jobs.TaskDetailsClassNames
 
taskEndFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
TaskEndReason - Interface in org.apache.spark
:: DeveloperApi :: Various possible reasons why a task ended.
taskEndReasonFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
taskEndReasonToJson(TaskEndReason) - Static method in class org.apache.spark.util.JsonProtocol
 
taskEndToJson(SparkListenerTaskEnd) - Static method in class org.apache.spark.util.JsonProtocol
 
TaskFailedReason - Interface in org.apache.spark
:: DeveloperApi :: Various possible reasons why a task failed.
TaskFailureListener - Interface in org.apache.spark.util
:: DeveloperApi ::
taskFailures() - Method in class org.apache.spark.scheduler.cluster.mesos.Slave
 
taskGettingResultFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
taskGettingResultToJson(SparkListenerTaskGettingResult) - Static method in class org.apache.spark.util.JsonProtocol
 
taskId() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.KillTask
 
taskId() - Method in class org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.StatusUpdate
 
taskId() - Method in class org.apache.spark.scheduler.local.KillTask
 
taskId() - Method in class org.apache.spark.scheduler.local.StatusUpdate
 
taskId() - Method in class org.apache.spark.scheduler.TaskInfo
 
taskId() - Method in class org.apache.spark.status.api.v1.TaskData
 
taskId() - Method in class org.apache.spark.storage.TaskResultBlockId
 
taskIDs() - Method in class org.apache.spark.scheduler.cluster.mesos.Slave
 
taskInfo() - Method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
taskInfo() - Method in class org.apache.spark.scheduler.SparkListenerTaskGettingResult
 
taskInfo() - Method in class org.apache.spark.scheduler.SparkListenerTaskStart
 
TaskInfo - Class in org.apache.spark.scheduler
:: DeveloperApi :: Information about a running task attempt inside a TaskSet.
TaskInfo(long, int, int, long, String, String, Enumeration.Value, boolean) - Constructor for class org.apache.spark.scheduler.TaskInfo
 
taskInfo() - Method in class org.apache.spark.ui.jobs.UIData.TaskUIData
 
taskInfoFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
taskInfoToJson(TaskInfo) - Static method in class org.apache.spark.util.JsonProtocol
 
TaskKilled - Class in org.apache.spark
:: DeveloperApi :: Task was killed intentionally and needs to be rescheduled.
TaskKilled() - Constructor for class org.apache.spark.TaskKilled
 
TaskKilledException - Exception in org.apache.spark
:: DeveloperApi :: Exception thrown when a task is explicitly killed (i.e., task failure is expected).
TaskKilledException() - Constructor for exception org.apache.spark.TaskKilledException
 
taskLocality() - Method in class org.apache.spark.scheduler.TaskInfo
 
TaskLocality - Class in org.apache.spark.scheduler
 
TaskLocality() - Constructor for class org.apache.spark.scheduler.TaskLocality
 
taskLocality() - Method in class org.apache.spark.status.api.v1.TaskData
 
TaskMetricDistributions - Class in org.apache.spark.status.api.v1
 
taskMetrics() - Method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
taskMetrics() - Method in class org.apache.spark.scheduler.StageInfo
 
taskMetrics() - Method in class org.apache.spark.status.api.v1.TaskData
 
TaskMetrics - Class in org.apache.spark.status.api.v1
 
taskMetrics() - Method in class org.apache.spark.TaskContext
 
taskMetricsFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
taskMetricsToJson(TaskMetrics) - Static method in class org.apache.spark.util.JsonProtocol
 
TASKRESULT() - Static method in class org.apache.spark.storage.BlockId
 
TaskResultBlockId - Class in org.apache.spark.storage
 
TaskResultBlockId(long) - Constructor for class org.apache.spark.storage.TaskResultBlockId
 
TaskResultLost - Class in org.apache.spark
:: DeveloperApi :: The task finished successfully, but the result was lost from the executor's block manager before it was fetched.
TaskResultLost() - Constructor for class org.apache.spark.TaskResultLost
 
tasks() - Method in class org.apache.spark.status.api.v1.StageData
 
TaskSchedulerIsSet - Class in org.apache.spark
An event that SparkContext uses to notify HeartbeatReceiver that SparkContext.taskScheduler is created.
TaskSchedulerIsSet() - Constructor for class org.apache.spark.TaskSchedulerIsSet
 
TaskSorting - Enum in org.apache.spark.status.api.v1
 
taskStartFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
taskStartToJson(SparkListenerTaskStart) - Static method in class org.apache.spark.util.JsonProtocol
 
TaskState - Class in org.apache.spark
 
TaskState() - Constructor for class org.apache.spark.TaskState
 
taskTime() - Method in class org.apache.spark.status.api.v1.ExecutorStageSummary
 
taskTime() - Method in class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
taskType() - Method in class org.apache.spark.scheduler.SparkListenerTaskEnd
 
TEMP_DIR_SHUTDOWN_PRIORITY() - Static method in class org.apache.spark.util.ShutdownHookManager
The shutdown priority of temp directory must be lower than the SparkContext shutdown priority.
tempFileWith(File) - Static method in class org.apache.spark.util.Utils
Returns the path of a temporary file located in the same directory as the given path.
terminateProcess(Process, long) - Static method in class org.apache.spark.util.Utils
Terminates a process waiting for at most the specified duration.
TEST() - Static method in class org.apache.spark.storage.BlockId
 
TEST_ACCUM() - Static method in class org.apache.spark.InternalAccumulator
 
testOneSample(RDD<Object>, String, double...) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
A convenience function that allows running the KS test for 1 set of sample data against a named distribution
testOneSample(RDD<Object>, Function1<Object, Object>) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
testOneSample(RDD<Object>, RealDistribution) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
testOneSample(RDD<Object>, String, Seq<Object>) - Static method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTest
 
TestResult<DF> - Interface in org.apache.spark.mllib.stat.test
Trait for hypothesis test results.
TestUtils - Class in org.apache.spark
Utilities for tests.
TestUtils() - Constructor for class org.apache.spark.TestUtils
 
text(String...) - Method in class org.apache.spark.sql.DataFrameReader
Loads a text file and returns a Dataset of String.
text(Seq<String>) - Method in class org.apache.spark.sql.DataFrameReader
Loads a text file and returns a Dataset of String.
text(String) - Method in class org.apache.spark.sql.DataFrameWriter
Saves the content of the DataFrame in a text file at the specified path.
textFile(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Read a text file from HDFS, a local file system (available on all nodes), or any Hadoop-supported file system URI, and return it as an RDD of Strings.
textFile(String, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Read a text file from HDFS, a local file system (available on all nodes), or any Hadoop-supported file system URI, and return it as an RDD of Strings.
textFile(String, int) - Method in class org.apache.spark.SparkContext
Read a text file from HDFS, a local file system (available on all nodes), or any Hadoop-supported file system URI, and return it as an RDD of Strings.
textFileStream(String) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them as text files (using key as LongWritable, value as Text and input format as TextInputFormat).
textFileStream(String) - Method in class org.apache.spark.streaming.StreamingContext
Create an input stream that monitors a Hadoop-compatible filesystem for new files and reads them as text files (using key as LongWritable, value as Text and input format as TextInputFormat).
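A hedged Scala sketch of a file-monitoring word count, assuming an existing SparkContext sc; the monitored directory is a placeholder:

    import org.apache.spark.streaming.{Seconds, StreamingContext}

    val ssc   = new StreamingContext(sc, Seconds(10))
    val lines = ssc.textFileStream("hdfs:///tmp/incoming")
    lines.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _).print()
    ssc.start()
    ssc.awaitTermination()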
textResponderToServlet(Function1<HttpServletRequest, String>) - Static method in class org.apache.spark.ui.JettyUtils
 
theta() - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
theta() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$.Data
 
theta() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$.Data
 
theta() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel
 
thisClassName() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$
Hard-code class name string in case it changes in the future
thisClassName() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$
Hard-code class name string in case it changes in the future
thisClassName() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$
 
thisCollection() - Static method in class org.apache.spark.sql.types.StructType
 
thisFormatVersion() - Method in class org.apache.spark.mllib.classification.impl.GLMClassificationModel.SaveLoadV1_0$
 
thisFormatVersion() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV1_0$
 
thisFormatVersion() - Method in class org.apache.spark.mllib.classification.NaiveBayesModel.SaveLoadV2_0$
 
thisFormatVersion() - Method in class org.apache.spark.mllib.regression.impl.GLMRegressionModel.SaveLoadV1_0$
 
thisFormatVersion() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$
 
ThreadUtils - Class in org.apache.spark.util
 
ThreadUtils() - Constructor for class org.apache.spark.util.ThreadUtils
 
threshold() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
threshold() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
threshold() - Method in class org.apache.spark.ml.feature.Binarizer
Param for threshold used to binarize continuous features.
threshold() - Method in class org.apache.spark.ml.tree.ContinuousSplit
 
threshold() - Method in class org.apache.spark.mllib.classification.impl.GLMClassificationModel.SaveLoadV1_0$.Data
 
threshold() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.SplitData
 
threshold() - Method in class org.apache.spark.mllib.tree.model.Split
 
thresholds() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
thresholds() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
thresholds() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
thresholds() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
thresholds() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
thresholds() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
thresholds() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
thresholds() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
thresholds() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
thresholds() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Returns thresholds in descending order.
throwBalls(int, RDD<?>, double, DefaultPartitionCoalescer.PartitionLocations) - Method in class org.apache.spark.rdd.DefaultPartitionCoalescer
 
time() - Method in class org.apache.spark.scheduler.SparkListenerApplicationEnd
 
time() - Method in class org.apache.spark.scheduler.SparkListenerApplicationStart
 
time() - Method in class org.apache.spark.scheduler.SparkListenerBlockManagerAdded
 
time() - Method in class org.apache.spark.scheduler.SparkListenerBlockManagerRemoved
 
time() - Method in class org.apache.spark.scheduler.SparkListenerExecutorAdded
 
time() - Method in class org.apache.spark.scheduler.SparkListenerExecutorRemoved
 
time() - Method in class org.apache.spark.scheduler.SparkListenerJobEnd
 
time() - Method in class org.apache.spark.scheduler.SparkListenerJobStart
 
time() - Method in exception org.apache.spark.sql.ContinuousQueryException
Time when the exception occurred
Time - Class in org.apache.spark.streaming
This is a simple class that represents an absolute instant of time.
Time(long) - Constructor for class org.apache.spark.streaming.Time
 
timeFromString(String, TimeUnit) - Static method in class org.apache.spark.internal.config.ConfigHelpers
 
timeIt(int, Function0<BoxedUnit>, Option<Function0<BoxedUnit>>) - Static method in class org.apache.spark.util.Utils
Timing method based on iterations that permit JVM JIT optimization.
timeout(Duration) - Method in class org.apache.spark.streaming.StateSpec
Set the duration after which the state of an idle key will be removed.
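A hedged sketch of a timeout in a mapWithState pipeline; wordPairs is an assumed DStream[(String, Int)] built elsewhere:

    import org.apache.spark.streaming.{Minutes, State, StateSpec}

    def countWords(word: String, one: Option[Int], state: State[Int]): (String, Int) = {
      val sum = one.getOrElse(0) + state.getOption.getOrElse(0)
      if (!state.isTimingOut()) state.update(sum)   // update() is not allowed while a key is timing out
      (word, sum)
    }

    val counts = wordPairs.mapWithState(
      StateSpec.function(countWords _).timeout(Minutes(10)))   // idle keys are removed after 10 minutes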
times(int) - Method in class org.apache.spark.streaming.Duration
 
times(int, Function0<BoxedUnit>) - Static method in class org.apache.spark.util.Utils
Executes a task repeatedly, for its side effects.
timestamp() - Method in class org.apache.spark.sql.ColumnName
Creates a new StructField of type timestamp.
TIMESTAMP() - Static method in class org.apache.spark.sql.Encoders
An encoder for nullable timestamp type.
TimestampType - Static variable in class org.apache.spark.sql.types.DataTypes
Gets the TimestampType object.
TimestampType - Class in org.apache.spark.sql.types
:: DeveloperApi :: The data type representing java.sql.Timestamp values.
timeStringAsMs(String) - Static method in class org.apache.spark.util.Utils
Convert a time parameter such as (50s, 100ms, or 250us) to milliseconds for internal use.
timeStringAsSeconds(String) - Static method in class org.apache.spark.util.Utils
Convert a time parameter such as (50s, 100ms, or 250us) to seconds for internal use.
timeToString(long, TimeUnit) - Static method in class org.apache.spark.internal.config.ConfigHelpers
 
TimeTrackingOutputStream - Class in org.apache.spark.storage
Intercepts write calls and tracks total time spent writing in order to update shuffle write metrics.
TimeTrackingOutputStream(ShuffleWriteMetrics, OutputStream) - Constructor for class org.apache.spark.storage.TimeTrackingOutputStream
 
timeUnit() - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
 
TIMING_DATA() - Static method in class org.apache.spark.api.r.SpecialLengths
 
to(CanBuildFrom<Nothing$, A, Col>) - Static method in class org.apache.spark.sql.types.StructType
 
to(Time, Duration) - Method in class org.apache.spark.streaming.Time
 
to_date(Column) - Static method in class org.apache.spark.sql.functions
Converts the column into DateType.
to_utc_timestamp(Column, String) - Static method in class org.apache.spark.sql.functions
Assumes given timestamp is in given timezone and converts to UTC.
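A short sketch of both conversions, assuming a SparkSession named spark; the timestamp literal and timezone are made up:

    import org.apache.spark.sql.functions.{to_date, to_utc_timestamp}

    val events = spark.createDataFrame(Seq(Tuple1("2016-06-01 12:30:00"))).toDF("ts")
    events.select(
      to_date(events("ts")).as("event_date"),
      to_utc_timestamp(events("ts"), "PST").as("ts_utc")   // interprets ts as PST, then converts to UTC
    ).show()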
toApacheCommonsStats(StatCounter) - Static method in class org.apache.spark.mllib.stat.test.StudentTTest
 
toApacheCommonsStats(StatCounter) - Static method in class org.apache.spark.mllib.stat.test.WelchTTest
 
toArray() - Method in class org.apache.spark.input.PortableDataStream
Read the file as a byte array
toArray() - Static method in class org.apache.spark.ml.linalg.DenseMatrix
 
toArray() - Method in class org.apache.spark.ml.linalg.DenseVector
 
toArray() - Method in interface org.apache.spark.ml.linalg.Matrix
Converts to a dense array in column major.
toArray() - Static method in class org.apache.spark.ml.linalg.SparseMatrix
 
toArray() - Method in class org.apache.spark.ml.linalg.SparseVector
 
toArray() - Method in interface org.apache.spark.ml.linalg.Vector
Converts the instance to a double array.
toArray() - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
 
toArray() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
toArray() - Method in interface org.apache.spark.mllib.linalg.Matrix
Converts to a dense array in column major.
toArray() - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
 
toArray() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
toArray() - Method in interface org.apache.spark.mllib.linalg.Vector
Converts the instance to a double array.
toArray(ClassTag<B>) - Static method in class org.apache.spark.sql.types.StructType
 
toAttributes() - Method in class org.apache.spark.sql.types.StructType
 
toBigDecimal() - Method in class org.apache.spark.sql.types.Decimal
 
toBlockMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
Converts to BlockMatrix.
toBlockMatrix(int, int) - Method in class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
Converts to BlockMatrix.
toBlockMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
Converts to BlockMatrix.
toBlockMatrix(int, int) - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
Converts to BlockMatrix.
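A minimal sketch of the CoordinateMatrix variant, assuming an existing SparkContext sc; the entries are made up:

    import org.apache.spark.mllib.linalg.distributed.{CoordinateMatrix, MatrixEntry}

    val entries = sc.parallelize(Seq(MatrixEntry(0, 1, 3.0), MatrixEntry(2, 0, 5.0)))
    val coord   = new CoordinateMatrix(entries)
    val blocks  = coord.toBlockMatrix(rowsPerBlock = 1024, colsPerBlock = 1024)
    blocks.validate()   // optional structural sanity check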
toBoolean(String, String) - Static method in class org.apache.spark.internal.config.ConfigHelpers
 
toBreeze() - Method in interface org.apache.spark.ml.linalg.Matrix
Converts to a breeze matrix.
toBreeze() - Method in interface org.apache.spark.ml.linalg.Vector
Converts the instance to a breeze vector.
toBreeze() - Method in interface org.apache.spark.mllib.linalg.distributed.DistributedMatrix
Collects data and assembles a local dense breeze matrix (for test only).
toBreeze() - Method in interface org.apache.spark.mllib.linalg.Matrix
Converts to a breeze matrix.
toBreeze() - Method in interface org.apache.spark.mllib.linalg.Vector
Converts the instance to a breeze vector.
toBuffer() - Static method in class org.apache.spark.sql.types.StructType
 
toByte() - Method in class org.apache.spark.sql.types.Decimal
 
toCatalystDecimal(HiveDecimalObjectInspector, Object) - Static method in class org.apache.spark.sql.hive.HiveShim
 
toCollection(Repr) - Static method in class org.apache.spark.sql.types.StructType
 
toColumn() - Method in class org.apache.spark.sql.expressions.Aggregator
Returns this Aggregator as a TypedColumn that can be used in Dataset.
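A minimal Scala sketch of the toColumn pattern (the Data case class, the aggregator, and spark are all made-up assumptions):

    import org.apache.spark.sql.{Encoder, Encoders}
    import org.apache.spark.sql.expressions.Aggregator
    import spark.implicits._   // spark is an assumed SparkSession

    case class Data(i: Int)

    // Hypothetical aggregator that sums the i field.
    val sumOfI = new Aggregator[Data, Int, Int] {
      def zero: Int = 0
      def reduce(b: Int, a: Data): Int = b + a.i
      def merge(b1: Int, b2: Int): Int = b1 + b2
      def finish(r: Int): Int = r
      def bufferEncoder: Encoder[Int] = Encoders.scalaInt
      def outputEncoder: Encoder[Int] = Encoders.scalaInt
    }

    // toColumn turns the Aggregator into a TypedColumn usable in Dataset.select.
    val ds = Seq(Data(1), Data(2), Data(3)).toDS()
    ds.select(sumOfI.toColumn)   // Dataset[Int] containing 6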
toCoordinateMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Converts to CoordinateMatrix.
toCoordinateMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
Converts this matrix to a CoordinateMatrix.
toDebugString() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
toDebugString() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
toDebugString() - Static method in class org.apache.spark.api.java.JavaRDD
 
toDebugString() - Method in interface org.apache.spark.api.java.JavaRDDLike
A description of this RDD and its recursive dependencies for debugging.
toDebugString() - Static method in class org.apache.spark.api.r.RRDD
 
toDebugString() - Static method in class org.apache.spark.graphx.EdgeRDD
 
toDebugString() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
toDebugString() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
toDebugString() - Static method in class org.apache.spark.graphx.VertexRDD
 
toDebugString() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
toDebugString() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
toDebugString() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
toDebugString() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
toDebugString() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
toDebugString() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
toDebugString() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
Print the full model to a string.
toDebugString() - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
toDebugString() - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
toDebugString() - Static method in class org.apache.spark.rdd.HadoopRDD
 
toDebugString() - Static method in class org.apache.spark.rdd.JdbcRDD
 
toDebugString() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
toDebugString() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
toDebugString() - Method in class org.apache.spark.rdd.RDD
A description of this RDD and its recursive dependencies for debugging.
toDebugString() - Method in class org.apache.spark.SparkConf
Return a string listing all keys and values, one per line.
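For illustration, a minimal Scala sketch of both toDebugString variants (sc, the SparkContext, and the lineage are assumptions):

    // The debug string lists this RDD and each of its parent RDDs.
    val rdd = sc.parallelize(1 to 100).map(_ * 2).filter(_ % 3 == 0)
    println(rdd.toDebugString)

    // SparkConf.toDebugString lists every configured key/value pair, one per line.
    println(sc.getConf.toDebugString)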
toDebugString() - Method in class org.apache.spark.sql.types.Decimal
 
toDegrees(Column) - Static method in class org.apache.spark.sql.functions
Converts an angle measured in radians to an approximately equivalent angle measured in degrees.
toDegrees(String) - Static method in class org.apache.spark.sql.functions
Converts an angle measured in radians to an approximately equivalent angle measured in degrees.
toDense() - Static method in class org.apache.spark.ml.linalg.DenseVector
 
toDense() - Method in class org.apache.spark.ml.linalg.SparseMatrix
Generate a DenseMatrix from the given SparseMatrix.
toDense() - Static method in class org.apache.spark.ml.linalg.SparseVector
 
toDense() - Method in interface org.apache.spark.ml.linalg.Vector
Converts this vector to a dense vector.
toDense() - Static method in class org.apache.spark.mllib.linalg.DenseVector
 
toDense() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
Generate a DenseMatrix from the given SparseMatrix.
toDense() - Static method in class org.apache.spark.mllib.linalg.SparseVector
 
toDense() - Method in interface org.apache.spark.mllib.linalg.Vector
Converts this vector to a dense vector.
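A minimal Scala sketch of the dense/sparse conversions on a vector (the values are made up; the same methods exist on the mllib.linalg types):

    import org.apache.spark.ml.linalg.Vectors

    val dense  = Vectors.dense(1.0, 0.0, 3.0)
    val sparse = dense.toSparse   // explicit zero removed
    val back   = sparse.toDense   // dense copy again
    val values = dense.toArray    // Array(1.0, 0.0, 3.0)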
toDF(String...) - Method in class org.apache.spark.sql.Dataset
Converts this strongly typed collection of data to generic DataFrame with columns renamed.
toDF() - Method in class org.apache.spark.sql.Dataset
Converts this strongly typed collection of data to a generic DataFrame.
toDF(Seq<String>) - Method in class org.apache.spark.sql.Dataset
Converts this strongly typed collection of data to generic DataFrame with columns renamed.
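For illustration, a minimal Scala sketch of toDF and the toDS conversion provided by DatasetHolder below (spark, the Person case class, and the sample data are assumptions):

    import spark.implicits._   // spark is an assumed SparkSession

    case class Person(name: String, age: Int)
    val ds = Seq(Person("Ann", 30), Person("Bo", 25)).toDS()   // Dataset[Person]
    val df = ds.toDF()                                         // untyped DataFrame
    val renamed = ds.toDF("person", "years")                   // rename columns while converting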
toDF() - Method in class org.apache.spark.sql.DatasetHolder
 
toDF(Seq<String>) - Method in class org.apache.spark.sql.DatasetHolder
 
toDouble() - Method in class org.apache.spark.sql.types.Decimal
 
toDS() - Method in class org.apache.spark.sql.DatasetHolder
 
toEdgeTriplet() - Method in class org.apache.spark.graphx.EdgeContext
Converts the edge and vertex properties into an EdgeTriplet for convenience.
toErrorString() - Method in class org.apache.spark.ExceptionFailure
 
toErrorString() - Method in class org.apache.spark.ExecutorLostFailure
 
toErrorString() - Method in class org.apache.spark.FetchFailed
 
toErrorString() - Static method in class org.apache.spark.Resubmitted
 
toErrorString() - Method in class org.apache.spark.TaskCommitDenied
 
toErrorString() - Method in interface org.apache.spark.TaskFailedReason
Error message displayed in the web UI.
toErrorString() - Static method in class org.apache.spark.TaskKilled
 
toErrorString() - Static method in class org.apache.spark.TaskResultLost
 
toErrorString() - Static method in class org.apache.spark.UnknownReason
 
toFloat() - Method in class org.apache.spark.sql.types.Decimal
 
toFormattedString() - Method in class org.apache.spark.streaming.Duration
 
toHiveString(Tuple2<Object, DataType>) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
toHiveStructString(Tuple2<Object, DataType>) - Static method in class org.apache.spark.sql.hive.HiveUtils
Hive outputs fields of structs slightly differently than top level attributes.
toIndexedRowMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Converts to IndexedRowMatrix.
toIndexedRowMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
Converts to IndexedRowMatrix.
toIndexedSeq() - Static method in class org.apache.spark.sql.types.StructType
 
toInspector(DataType) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
toInspector(Expression) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
toInt() - Method in class org.apache.spark.sql.types.Decimal
 
toInt() - Method in class org.apache.spark.storage.StorageLevel
 
toIterable() - Static method in class org.apache.spark.sql.types.StructType
 
toIterator() - Static method in class org.apache.spark.sql.types.StructType
 
toJavaBigDecimal() - Method in class org.apache.spark.sql.types.Decimal
 
toJavaDStream() - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Convert to a JavaDStream
toJavaDStream() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
toJavaDStream() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
toJavaRDD() - Static method in class org.apache.spark.api.r.RRDD
 
toJavaRDD() - Static method in class org.apache.spark.graphx.EdgeRDD
 
toJavaRDD() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
toJavaRDD() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
toJavaRDD() - Static method in class org.apache.spark.graphx.VertexRDD
 
toJavaRDD() - Static method in class org.apache.spark.rdd.HadoopRDD
 
toJavaRDD() - Static method in class org.apache.spark.rdd.JdbcRDD
 
toJavaRDD() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
toJavaRDD() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
toJavaRDD() - Method in class org.apache.spark.rdd.RDD
 
toJavaRDD() - Method in class org.apache.spark.sql.Dataset
Returns the content of the Dataset as a JavaRDD of Rows.
toJson(Vector) - Static method in class org.apache.spark.ml.linalg.JsonVectorConverter
Converts the vector to a JSON string.
toJson() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
toJson() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
toJson() - Method in interface org.apache.spark.mllib.linalg.Vector
Converts the vector to a JSON string.
toJSON() - Method in class org.apache.spark.sql.Dataset
Returns the content of the Dataset as a Dataset of JSON strings.
toJSON() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
Tokenizer - Class in org.apache.spark.ml.feature
:: Experimental :: A tokenizer that converts the input string to lowercase and then splits it by white spaces.
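A minimal Scala usage sketch (spark and the sample sentences are assumptions):

    import org.apache.spark.ml.feature.Tokenizer
    import spark.implicits._   // spark is an assumed SparkSession

    val sentences = Seq("Hi I heard about Spark", "Logistic regression models").toDF("sentence")
    val tokenizer = new Tokenizer().setInputCol("sentence").setOutputCol("words")
    tokenizer.transform(sentences).select("words").show(truncate = false)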
Tokenizer(String) - Constructor for class org.apache.spark.ml.feature.Tokenizer
 
Tokenizer() - Constructor for class org.apache.spark.ml.feature.Tokenizer
 
tol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
tol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
tol() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
tol() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
tol() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
tol() - Static method in class org.apache.spark.ml.clustering.KMeans
 
tol() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
tol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
tol() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
tol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
tol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
tol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
tol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
toList() - Static method in class org.apache.spark.sql.types.StructType
 
toLocal() - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
Convert this distributed model to a local representation.
toLocal() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
Convert this distributed model to a local model.
toLocalIterator() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
toLocalIterator() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
toLocalIterator() - Static method in class org.apache.spark.api.java.JavaRDD
 
toLocalIterator() - Method in interface org.apache.spark.api.java.JavaRDDLike
Return an iterator that contains all of the elements in this RDD.
toLocalIterator() - Static method in class org.apache.spark.api.r.RRDD
 
toLocalIterator() - Static method in class org.apache.spark.graphx.EdgeRDD
 
toLocalIterator() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
toLocalIterator() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
toLocalIterator() - Static method in class org.apache.spark.graphx.VertexRDD
 
toLocalIterator() - Static method in class org.apache.spark.rdd.HadoopRDD
 
toLocalIterator() - Static method in class org.apache.spark.rdd.JdbcRDD
 
toLocalIterator() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
toLocalIterator() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
toLocalIterator() - Method in class org.apache.spark.rdd.RDD
Return an iterator that contains all of the elements in this RDD.
toLocalIterator() - Method in class org.apache.spark.sql.Dataset
Return an iterator that contains all of the Rows in this Dataset.
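A minimal Scala sketch of toLocalIterator (sc is an assumed SparkContext):

    // Iterates partition by partition, so the driver only needs to hold
    // one partition's worth of elements at a time.
    val rdd = sc.parallelize(1 to 1000, 10)
    rdd.toLocalIterator.foreach(println)

    // The Dataset variant returns a java.util.Iterator over the Rows:
    //   df.toLocalIterator()   // df is an assumed DataFrame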
toLocalMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Collect the distributed matrix on the driver as a `DenseMatrix`.
toLong() - Method in class org.apache.spark.sql.types.Decimal
 
toLowercase() - Method in class org.apache.spark.ml.feature.RegexTokenizer
Indicates whether to convert all characters to lowercase before tokenizing.
toMap(Predef.$less$colon$less<A, Tuple2<T, U>>) - Static method in class org.apache.spark.sql.types.StructType
 
toMesos(Enumeration.Value) - Static method in class org.apache.spark.TaskState
 
toMetadata(Metadata) - Method in class org.apache.spark.ml.attribute.Attribute
Converts to ML metadata with some existing metadata.
toMetadata() - Method in class org.apache.spark.ml.attribute.Attribute
Converts to ML metadata
toMetadata(Metadata) - Method in class org.apache.spark.ml.attribute.AttributeGroup
Converts to ML metadata with some existing metadata.
toMetadata() - Method in class org.apache.spark.ml.attribute.AttributeGroup
Converts to ML metadata
toMetadata(Metadata) - Static method in class org.apache.spark.ml.attribute.BinaryAttribute
 
toMetadata() - Static method in class org.apache.spark.ml.attribute.BinaryAttribute
 
toMetadata(Metadata) - Static method in class org.apache.spark.ml.attribute.NominalAttribute
 
toMetadata() - Static method in class org.apache.spark.ml.attribute.NominalAttribute
 
toMetadata(Metadata) - Static method in class org.apache.spark.ml.attribute.NumericAttribute
 
toMetadata() - Static method in class org.apache.spark.ml.attribute.NumericAttribute
 
toMetadata(Metadata) - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
toMetadata() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
toNumber(String, Function1<String, T>, String, String) - Static method in class org.apache.spark.internal.config.ConfigHelpers
 
toOld() - Method in interface org.apache.spark.ml.tree.Split
Convert to old Split format
tooltip(String, String) - Static method in class org.apache.spark.ui.UIUtils
 
ToolTips - Class in org.apache.spark.ui
 
ToolTips() - Constructor for class org.apache.spark.ui.ToolTips
 
top(int, Comparator<T>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
top(int) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
top(int, Comparator<T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
top(int) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
top(int, Comparator<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
top(int) - Static method in class org.apache.spark.api.java.JavaRDD
 
top(int, Comparator<T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Returns the top k (largest) elements from this RDD as defined by the specified Comparator[T] and maintains the order.
top(int) - Method in interface org.apache.spark.api.java.JavaRDDLike
Returns the top k (largest) elements from this RDD using the natural ordering for T and maintains the order.
top(int, Ordering<T>) - Static method in class org.apache.spark.api.r.RRDD
 
top(int, Ordering<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
top(int, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
top(int, Ordering<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
top(int, Ordering<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
top(int, Ordering<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
top(int, Ordering<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
top(int, Ordering<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
top(int, Ordering<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
top(int, Ordering<T>) - Method in class org.apache.spark.rdd.RDD
Returns the top k (largest) elements from this RDD as defined by the specified implicit Ordering[T] and maintains the ordering.
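For illustration, a minimal Scala sketch of top (sc is an assumed SparkContext):

    // Results come back sorted in descending order of the ordering used.
    sc.parallelize(Seq(10, 4, 2, 12, 3)).top(3)                           // Array(12, 10, 4)
    sc.parallelize(Seq("b", "a", "c")).top(2)(Ordering[String].reverse)   // Array(a, b)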
toPairDStreamFunctions(DStream<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>, Ordering<K>) - Static method in class org.apache.spark.streaming.dstream.DStream
 
topByKey(int, Ordering<V>) - Method in class org.apache.spark.mllib.rdd.MLPairRDDFunctions
Returns the top k (largest) elements for each key from this RDD as defined by the specified implicit Ordering[T].
topDocumentsPerTopic(int) - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
Return the top documents for each topic
topic() - Method in class org.apache.spark.streaming.kafka.OffsetRange
 
topicAndPartition() - Method in class org.apache.spark.streaming.kafka.OffsetRange
Kafka TopicAndPartition object, for convenience
topicAssignments() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
Return the top topic for each (doc, term) pair.
topicConcentration() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
topicConcentration() - Static method in class org.apache.spark.ml.clustering.LDA
 
topicConcentration() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
topicConcentration() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
 
topicConcentration() - Method in class org.apache.spark.mllib.clustering.LDAModel
Concentration parameter (commonly named "beta" or "eta") for the prior placed on topics' distributions over terms.
topicConcentration() - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
topicDistribution(Vector) - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
Predicts the topic mixture distribution for a document (often called "theta" in the literature).
topicDistributionCol() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
topicDistributionCol() - Static method in class org.apache.spark.ml.clustering.LDA
 
topicDistributionCol() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
topicDistributions() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
For each document in the training set, return the distribution over topics for that document ("theta_doc").
topicDistributions(RDD<Tuple2<Object, Vector>>) - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
Predicts the topic mixture distribution for each document (often called "theta" in the literature).
topicDistributions(JavaPairRDD<Long, Vector>) - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
Java-friendly version of topicDistributions
topics() - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
topicsMatrix() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
topicsMatrix() - Method in class org.apache.spark.ml.clustering.LDAModel
Inferred topics, where each topic is represented by a distribution over terms.
topicsMatrix() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
topicsMatrix() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
Inferred topics, where each topic is represented by a distribution over terms.
topicsMatrix() - Method in class org.apache.spark.mllib.clustering.LDAModel
Inferred topics, where each topic is represented by a distribution over terms.
topicsMatrix() - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
topK(Iterator<Tuple2<String, Object>>, int) - Static method in class org.apache.spark.streaming.util.RawTextHelper
Gets the top k words in terms of word counts.
toPMML(String) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
toPMML(SparkContext, String) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
toPMML(OutputStream) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
toPMML() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
toPMML(String) - Static method in class org.apache.spark.mllib.classification.SVMModel
 
toPMML(SparkContext, String) - Static method in class org.apache.spark.mllib.classification.SVMModel
 
toPMML(OutputStream) - Static method in class org.apache.spark.mllib.classification.SVMModel
 
toPMML() - Static method in class org.apache.spark.mllib.classification.SVMModel
 
toPMML(String) - Static method in class org.apache.spark.mllib.clustering.KMeansModel
 
toPMML(SparkContext, String) - Static method in class org.apache.spark.mllib.clustering.KMeansModel
 
toPMML(OutputStream) - Static method in class org.apache.spark.mllib.clustering.KMeansModel
 
toPMML() - Static method in class org.apache.spark.mllib.clustering.KMeansModel
 
toPMML(StreamResult) - Method in interface org.apache.spark.mllib.pmml.PMMLExportable
Export the model to the stream result in PMML format
toPMML(String) - Method in interface org.apache.spark.mllib.pmml.PMMLExportable
:: Experimental :: Export the model to a local file in PMML format
toPMML(SparkContext, String) - Method in interface org.apache.spark.mllib.pmml.PMMLExportable
:: Experimental :: Export the model to a directory on a distributed file system in PMML format
toPMML(OutputStream) - Method in interface org.apache.spark.mllib.pmml.PMMLExportable
:: Experimental :: Export the model to the OutputStream in PMML format
toPMML() - Method in interface org.apache.spark.mllib.pmml.PMMLExportable
:: Experimental :: Export the model to a String in PMML format
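A minimal Scala sketch of the PMML export variants (model is an assumed, already trained PMML-exportable model such as a KMeansModel; sc and the paths are made up):

    model.toPMML()                               // PMML document as a String
    model.toPMML("/tmp/model.pmml")              // local file
    model.toPMML(sc, "hdfs:///models/kmeans")    // directory on a distributed file system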
toPMML(String) - Static method in class org.apache.spark.mllib.regression.LassoModel
 
toPMML(SparkContext, String) - Static method in class org.apache.spark.mllib.regression.LassoModel
 
toPMML(OutputStream) - Static method in class org.apache.spark.mllib.regression.LassoModel
 
toPMML() - Static method in class org.apache.spark.mllib.regression.LassoModel
 
toPMML(String) - Static method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
toPMML(SparkContext, String) - Static method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
toPMML(OutputStream) - Static method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
toPMML() - Static method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
toPMML(String) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
toPMML(SparkContext, String) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
toPMML(OutputStream) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
toPMML() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
topNode() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
 
toPredict() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.PredictData
 
topTopicsPerDocument(int) - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
For each document, return the top k weighted topics for that document and their weights.
toPythonIterator() - Method in class org.apache.spark.sql.Dataset
 
toRadians(Column) - Static method in class org.apache.spark.sql.functions
Converts an angle measured in degrees to an approximately equivalent angle measured in radians.
toRadians(String) - Static method in class org.apache.spark.sql.functions
Converts an angle measured in degrees to an approximately equivalent angle measured in radians.
toRDD(JavaDoubleRDD) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
toRDD(JavaPairRDD<K, V>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
toRDD(JavaRDD<T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
toRowMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
Converts to RowMatrix, dropping row indices after grouping by row index.
toRowMatrix() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRowMatrix
Drops row indices and converts this matrix to a RowMatrix.
toSeq() - Method in class org.apache.spark.ml.param.ParamMap
Converts this param map to a sequence of param pairs.
toSeq() - Method in interface org.apache.spark.sql.Row
Return a Scala Seq representing the row.
toSeq() - Static method in class org.apache.spark.sql.types.StructType
 
toSet() - Static method in class org.apache.spark.sql.types.StructType
 
toShort() - Method in class org.apache.spark.sql.types.Decimal
 
toSparkContext(JavaSparkContext) - Static method in class org.apache.spark.api.java.JavaSparkContext
 
toSparse() - Method in class org.apache.spark.ml.linalg.DenseMatrix
Generate a SparseMatrix from the given DenseMatrix.
toSparse() - Method in class org.apache.spark.ml.linalg.DenseVector
 
toSparse() - Method in class org.apache.spark.ml.linalg.SparseVector
 
toSparse() - Method in interface org.apache.spark.ml.linalg.Vector
Converts this vector to a sparse vector with all explicit zeros removed.
toSparse() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
Generate a SparseMatrix from the given DenseMatrix.
toSparse() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
toSparse() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
toSparse() - Method in interface org.apache.spark.mllib.linalg.Vector
Converts this vector to a sparse vector with all explicit zeros removed.
toSplit() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.SplitData
 
toSplitInfo(Class<?>, String, InputSplit) - Static method in class org.apache.spark.scheduler.SplitInfo
 
toSplitInfo(Class<?>, String, InputSplit) - Static method in class org.apache.spark.scheduler.SplitInfo
 
toStream() - Static method in class org.apache.spark.sql.types.StructType
 
toString() - Method in class org.apache.spark.Accumulable
Deprecated.
 
toString() - Static method in class org.apache.spark.Accumulator
Deprecated.
 
toString() - Method in class org.apache.spark.api.java.JavaRDD
 
toString() - Method in class org.apache.spark.api.java.Optional
 
toString() - Static method in class org.apache.spark.api.r.RRDD
 
toString() - Method in class org.apache.spark.broadcast.Broadcast
 
toString() - Method in class org.apache.spark.graphx.EdgeDirection
 
toString() - Static method in class org.apache.spark.graphx.EdgeRDD
 
toString() - Method in class org.apache.spark.graphx.EdgeTriplet
 
toString() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
toString() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
toString() - Static method in class org.apache.spark.graphx.VertexRDD
 
toString() - Method in class org.apache.spark.io.LZ4BlockInputStream
 
toString() - Method in class org.apache.spark.ml.attribute.Attribute
 
toString() - Method in class org.apache.spark.ml.attribute.AttributeGroup
 
toString() - Static method in class org.apache.spark.ml.attribute.BinaryAttribute
 
toString() - Static method in class org.apache.spark.ml.attribute.NominalAttribute
 
toString() - Static method in class org.apache.spark.ml.attribute.NumericAttribute
 
toString() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
toString() - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
toString() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
toString() - Method in class org.apache.spark.ml.classification.GBTClassificationModel
 
toString() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
toString() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
toString() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
toString() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
toString() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
toString() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
toString() - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
toString() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
toString() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
toString() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
toString() - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
toString() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
toString() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
toString() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
toString() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
toString() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
toString() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
toString() - Static method in class org.apache.spark.ml.clustering.KMeans
 
toString() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
toString() - Static method in class org.apache.spark.ml.clustering.LDA
 
toString() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
toString() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
toString() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
toString() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
toString() - Static method in class org.apache.spark.ml.feature.Binarizer
 
toString() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
toString() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
toString() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
toString() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
toString() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
toString() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
toString() - Static method in class org.apache.spark.ml.feature.DCT
 
toString() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
toString() - Static method in class org.apache.spark.ml.feature.HashingTF
 
toString() - Static method in class org.apache.spark.ml.feature.IDF
 
toString() - Static method in class org.apache.spark.ml.feature.IDFModel
 
toString() - Static method in class org.apache.spark.ml.feature.IndexToString
 
toString() - Static method in class org.apache.spark.ml.feature.Interaction
 
toString() - Method in class org.apache.spark.ml.feature.LabeledPoint
 
toString() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
toString() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
toString() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
toString() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
toString() - Static method in class org.apache.spark.ml.feature.NGram
 
toString() - Static method in class org.apache.spark.ml.feature.Normalizer
 
toString() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
toString() - Static method in class org.apache.spark.ml.feature.PCA
 
toString() - Static method in class org.apache.spark.ml.feature.PCAModel
 
toString() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
toString() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
toString() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
toString() - Method in class org.apache.spark.ml.feature.RFormula
 
toString() - Method in class org.apache.spark.ml.feature.RFormulaModel
 
toString() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
toString() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
toString() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
toString() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
toString() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
toString() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
toString() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
toString() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
toString() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
toString() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
toString() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
toString() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
toString() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
toString() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
toString() - Static method in class org.apache.spark.ml.linalg.DenseMatrix
 
toString(int, int) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
 
toString() - Method in class org.apache.spark.ml.linalg.DenseVector
 
toString() - Method in interface org.apache.spark.ml.linalg.Matrix
A human-readable representation of the matrix.
toString(int, int) - Method in interface org.apache.spark.ml.linalg.Matrix
A human-readable representation of the matrix, limited to the given maximum number of lines and line width.
toString() - Static method in class org.apache.spark.ml.linalg.SparseMatrix
 
toString(int, int) - Static method in class org.apache.spark.ml.linalg.SparseMatrix
 
toString() - Method in class org.apache.spark.ml.linalg.SparseVector
 
toString() - Static method in class org.apache.spark.ml.param.DoubleParam
 
toString() - Static method in class org.apache.spark.ml.param.FloatParam
 
toString() - Method in class org.apache.spark.ml.param.Param
 
toString() - Method in class org.apache.spark.ml.param.ParamMap
 
toString() - Static method in class org.apache.spark.ml.Pipeline
 
toString() - Static method in class org.apache.spark.ml.PipelineModel
 
toString() - Static method in class org.apache.spark.ml.recommendation.ALS
 
toString() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
toString() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
toString() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
toString() - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
toString() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
toString() - Method in class org.apache.spark.ml.regression.GBTRegressionModel
 
toString() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
toString() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
toString() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
toString() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
toString() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
toString() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
toString() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
toString() - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
toString() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
toString() - Method in class org.apache.spark.ml.source.libsvm.DefaultSource
 
toString() - Method in class org.apache.spark.ml.tree.InternalNode
 
toString() - Method in class org.apache.spark.ml.tree.LeafNode
 
toString() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
toString() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
toString() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
toString() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
toString() - Method in interface org.apache.spark.ml.util.Identifiable
 
toString() - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
toString() - Method in class org.apache.spark.mllib.classification.SVMModel
 
toString() - Method in class org.apache.spark.mllib.fpm.AssociationRules.Rule
 
toString() - Method in class org.apache.spark.mllib.fpm.FPGrowth.FreqItemset
 
toString() - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
 
toString(int, int) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
 
toString() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
toString() - Method in interface org.apache.spark.mllib.linalg.Matrix
A human-readable representation of the matrix.
toString(int, int) - Method in interface org.apache.spark.mllib.linalg.Matrix
A human-readable representation of the matrix, limited to the given maximum number of lines and line width.
toString() - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
 
toString(int, int) - Static method in class org.apache.spark.mllib.linalg.SparseMatrix
 
toString() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
toString() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearModel
Print a summary of the model.
toString() - Method in class org.apache.spark.mllib.regression.LabeledPoint
 
toString() - Static method in class org.apache.spark.mllib.regression.LassoModel
 
toString() - Static method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
toString() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
toString() - Method in class org.apache.spark.mllib.stat.test.BinarySample
 
toString() - Method in class org.apache.spark.mllib.stat.test.ChiSqTestResult
 
toString() - Method in class org.apache.spark.mllib.stat.test.KolmogorovSmirnovTestResult
 
toString() - Method in interface org.apache.spark.mllib.stat.test.TestResult
String explaining the hypothesis test result.
toString() - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
toString() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
toString() - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
toString() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
toString() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel
Print a summary of the model.
toString() - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
toString() - Method in class org.apache.spark.mllib.tree.model.InformationGainStats
 
toString() - Method in class org.apache.spark.mllib.tree.model.Node
 
toString() - Method in class org.apache.spark.mllib.tree.model.Predict
 
toString() - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
toString() - Method in class org.apache.spark.mllib.tree.model.Split
 
toString() - Method in class org.apache.spark.partial.BoundedDouble
 
toString() - Method in class org.apache.spark.partial.PartialResult
 
toString() - Static method in class org.apache.spark.rdd.CheckpointState
 
toString() - Static method in class org.apache.spark.rdd.HadoopRDD
 
toString() - Static method in class org.apache.spark.rdd.JdbcRDD
 
toString() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
toString() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
toString() - Method in class org.apache.spark.rdd.RDD
 
toString() - Static method in class org.apache.spark.scheduler.ExecutorKilled
 
toString() - Method in class org.apache.spark.scheduler.InputFormatInfo
 
toString() - Static method in class org.apache.spark.scheduler.LossReasonPending
 
toString() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
toString() - Method in class org.apache.spark.scheduler.SplitInfo
 
toString() - Static method in class org.apache.spark.scheduler.TaskLocality
 
toString() - Method in class org.apache.spark.SerializableWritable
 
toString() - Static method in exception org.apache.spark.sql.AnalysisException
 
toString() - Method in class org.apache.spark.sql.catalog.Column
 
toString() - Method in class org.apache.spark.sql.catalog.Database
 
toString() - Method in class org.apache.spark.sql.catalog.Function
 
toString() - Method in class org.apache.spark.sql.catalog.Table
 
toString() - Method in class org.apache.spark.sql.Column
 
toString() - Method in exception org.apache.spark.sql.ContinuousQueryException
 
toString() - Method in class org.apache.spark.sql.Dataset
 
toString() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
toString() - Method in interface org.apache.spark.sql.Row
 
toString() - Method in class org.apache.spark.sql.sources.In
 
toString() - Method in class org.apache.spark.sql.types.Decimal
 
toString() - Method in class org.apache.spark.sql.types.DecimalType
 
toString() - Method in class org.apache.spark.sql.types.Metadata
 
toString() - Method in class org.apache.spark.sql.types.StructField
 
toString() - Static method in class org.apache.spark.sql.types.StructType
 
toString() - Method in class org.apache.spark.storage.BlockId
 
toString() - Method in class org.apache.spark.storage.BlockManagerId
 
toString() - Static method in class org.apache.spark.storage.BroadcastBlockId
 
toString() - Static method in class org.apache.spark.storage.RDDBlockId
 
toString() - Method in class org.apache.spark.storage.RDDInfo
 
toString() - Static method in class org.apache.spark.storage.ShuffleBlockId
 
toString() - Static method in class org.apache.spark.storage.ShuffleDataBlockId
 
toString() - Static method in class org.apache.spark.storage.ShuffleIndexBlockId
 
toString() - Method in class org.apache.spark.storage.StorageLevel
 
toString() - Static method in class org.apache.spark.storage.StreamBlockId
 
toString() - Static method in class org.apache.spark.storage.TaskResultBlockId
 
toString() - Method in class org.apache.spark.streaming.Duration
 
toString() - Method in class org.apache.spark.streaming.kafka.Broker
 
toString() - Method in class org.apache.spark.streaming.kafka.OffsetRange
 
toString() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
toString() - Method in class org.apache.spark.streaming.State
 
toString() - Method in class org.apache.spark.streaming.Time
 
toString() - Static method in class org.apache.spark.TaskState
 
toString() - Method in class org.apache.spark.util.AccumulatorV2
 
toString() - Method in class org.apache.spark.util.MutablePair
 
toString() - Method in class org.apache.spark.util.StatCounter
 
toStructField(Metadata) - Method in class org.apache.spark.ml.attribute.Attribute
Converts to a StructField with some existing metadata.
toStructField() - Method in class org.apache.spark.ml.attribute.Attribute
Converts to a StructField.
toStructField(Metadata) - Method in class org.apache.spark.ml.attribute.AttributeGroup
Converts to a StructField with some existing metadata.
toStructField() - Method in class org.apache.spark.ml.attribute.AttributeGroup
Converts to a StructField.
toStructField(Metadata) - Static method in class org.apache.spark.ml.attribute.BinaryAttribute
 
toStructField() - Static method in class org.apache.spark.ml.attribute.BinaryAttribute
 
toStructField(Metadata) - Static method in class org.apache.spark.ml.attribute.NominalAttribute
 
toStructField() - Static method in class org.apache.spark.ml.attribute.NominalAttribute
 
toStructField(Metadata) - Static method in class org.apache.spark.ml.attribute.NumericAttribute
 
toStructField() - Static method in class org.apache.spark.ml.attribute.NumericAttribute
 
toStructField(Metadata) - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
toStructField() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
totalBlocksFetched() - Method in class org.apache.spark.status.api.v1.ShuffleReadMetricDistributions
 
totalCores() - Method in class org.apache.spark.scheduler.cluster.ExecutorInfo
 
totalCores() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
totalCount() - Method in class org.apache.spark.util.sketch.CountMinSketch
Total count of items added to this CountMinSketch so far.
totalDelay() - Method in class org.apache.spark.streaming.scheduler.BatchInfo
Time taken for all the jobs of this batch to finish processing from the time they were submitted.
totalDuration() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
totalGCTime() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
totalInputBytes() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
totalIterations() - Method in interface org.apache.spark.ml.classification.LogisticRegressionTrainingSummary
Number of training iterations until termination
totalIterations() - Method in class org.apache.spark.ml.regression.LinearRegressionTrainingSummary
Number of training iterations until termination
totalNumNodes() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
totalNumNodes() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
totalNumNodes() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
totalNumNodes() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
totalNumNodes() - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
totalNumNodes() - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
totalShuffleRead() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
totalShuffleWrite() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
totalTasks() - Method in class org.apache.spark.status.api.v1.ExecutorSummary
 
toTraversable() - Static method in class org.apache.spark.sql.types.StructType
 
toTuple() - Method in class org.apache.spark.graphx.EdgeTriplet
 
toUnscaledLong() - Method in class org.apache.spark.sql.types.Decimal
 
toVector() - Static method in class org.apache.spark.sql.types.StructType
 
train(Dataset<?>) - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
train(Dataset<?>) - Method in class org.apache.spark.ml.classification.GBTClassifier
 
train(Dataset<?>) - Method in class org.apache.spark.ml.classification.LogisticRegression
 
train(Dataset<?>, boolean) - Method in class org.apache.spark.ml.classification.LogisticRegression
 
train(Dataset<?>) - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
Train a model using the given dataset and parameters.
train(Dataset<?>) - Method in class org.apache.spark.ml.classification.NaiveBayes
 
train(Dataset<?>) - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
train(Dataset<?>) - Method in class org.apache.spark.ml.Predictor
Train a model using the given dataset and parameters.
train(RDD<ALS.Rating<ID>>, int, int, int, int, double, boolean, double, boolean, StorageLevel, StorageLevel, int, long, ClassTag<ID>, Ordering<ID>) - Static method in class org.apache.spark.ml.recommendation.ALS
:: DeveloperApi :: Implementation of the ALS algorithm.
train(Dataset<?>) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
train(Dataset<?>) - Method in class org.apache.spark.ml.regression.GBTRegressor
 
train(Dataset<?>) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
train(Dataset<?>) - Method in class org.apache.spark.ml.regression.LinearRegression
 
train(Dataset<?>) - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
train(RDD<LabeledPoint>, int, double, double, Vector) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
Train a logistic regression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
Train a logistic regression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
Train a logistic regression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
Train a logistic regression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
Trains a Naive Bayes model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, double) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
Trains a Naive Bayes model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, double, String) - Static method in class org.apache.spark.mllib.classification.NaiveBayes
Trains a Naive Bayes model given an RDD of (label, features) pairs.
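For illustration, a minimal Scala sketch of NaiveBayes.train (sc and the toy labeled points are assumptions):

    import org.apache.spark.mllib.classification.NaiveBayes
    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.regression.LabeledPoint

    val data = sc.parallelize(Seq(
      LabeledPoint(0.0, Vectors.dense(1.0, 0.0)),
      LabeledPoint(1.0, Vectors.dense(0.0, 1.0))))

    // Smoothing parameter 1.0 and the "multinomial" model type.
    val model = NaiveBayes.train(data, 1.0, "multinomial")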
train(RDD<LabeledPoint>, int, double, double, double, Vector) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
Train an SVM model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double, double) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
Train an SVM model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
Train an SVM model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
Train an SVM model given an RDD of (label, features) pairs.
train(RDD<Vector>, int, int, int, String, long) - Static method in class org.apache.spark.mllib.clustering.KMeans
Trains a k-means model using the given set of parameters.
train(RDD<Vector>, int, int, int, String) - Static method in class org.apache.spark.mllib.clustering.KMeans
Trains a k-means model using the given set of parameters.
train(RDD<Vector>, int, int) - Static method in class org.apache.spark.mllib.clustering.KMeans
Trains a k-means model using specified parameters and the default values for unspecified.
train(RDD<Vector>, int, int, int) - Static method in class org.apache.spark.mllib.clustering.KMeans
Trains a k-means model using specified parameters and the default values for unspecified.
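A minimal Scala sketch of the simplest KMeans.train overload (sc and the toy points are assumptions):

    import org.apache.spark.mllib.clustering.KMeans
    import org.apache.spark.mllib.linalg.Vectors

    val points = sc.parallelize(Seq(
      Vectors.dense(0.0, 0.0), Vectors.dense(0.1, 0.1),
      Vectors.dense(9.0, 9.0), Vectors.dense(9.1, 9.1)))

    // k = 2 clusters, at most 20 iterations; the longer overloads add runs,
    // an initialization mode, and a seed.
    val model = KMeans.train(points, 2, 20)
    println(model.clusterCenters.mkString(", "))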
train(RDD<Rating>, int, int, double, int, long) - Static method in class org.apache.spark.mllib.recommendation.ALS
Train a matrix factorization model given an RDD of ratings by users for a subset of products.
train(RDD<Rating>, int, int, double, int) - Static method in class org.apache.spark.mllib.recommendation.ALS
Train a matrix factorization model given an RDD of ratings by users for a subset of products.
train(RDD<Rating>, int, int, double) - Static method in class org.apache.spark.mllib.recommendation.ALS
Train a matrix factorization model given an RDD of ratings by users for a subset of products.
train(RDD<Rating>, int, int) - Static method in class org.apache.spark.mllib.recommendation.ALS
Train a matrix factorization model given an RDD of ratings by users for a subset of products.
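For illustration, a minimal Scala sketch of ALS.train (sc and the toy ratings are assumptions):

    import org.apache.spark.mllib.recommendation.{ALS, Rating}

    val ratings = sc.parallelize(Seq(
      Rating(1, 10, 4.0), Rating(1, 20, 1.0), Rating(2, 10, 5.0)))

    // rank = 10 latent factors, 10 iterations, regularization 0.01.
    val model = ALS.train(ratings, 10, 10, 0.01)
    model.predict(2, 20)   // predicted rating for user 2 on product 20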
train(RDD<LabeledPoint>, int, double, double, double, Vector) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
Train a Lasso model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double, double) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
Train a Lasso model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
Train a Lasso model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
Train a Lasso model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double, Vector) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
Train a Linear Regression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
Train a LinearRegression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
Train a LinearRegression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
Train a LinearRegression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double, double, Vector) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
Train a RidgeRegression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double, double) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
Train a RidgeRegression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int, double, double) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
Train a RidgeRegression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, int) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
Train a RidgeRegression model given an RDD of (label, features) pairs.
train(RDD<LabeledPoint>, Strategy) - Static method in class org.apache.spark.mllib.tree.DecisionTree
Method to train a decision tree model.
train(RDD<LabeledPoint>, Enumeration.Value, Impurity, int) - Static method in class org.apache.spark.mllib.tree.DecisionTree
Method to train a decision tree model.
train(RDD<LabeledPoint>, Enumeration.Value, Impurity, int, int) - Static method in class org.apache.spark.mllib.tree.DecisionTree
Method to train a decision tree model.
train(RDD<LabeledPoint>, Enumeration.Value, Impurity, int, int, int, Enumeration.Value, Map<Object, Object>) - Static method in class org.apache.spark.mllib.tree.DecisionTree
Method to train a decision tree model.
train(RDD<LabeledPoint>, BoostingStrategy) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
Method to train a gradient boosting model.
train(JavaRDD<LabeledPoint>, BoostingStrategy) - Static method in class org.apache.spark.mllib.tree.GradientBoostedTrees
Java-friendly API for GradientBoostedTrees$.train(org.apache.spark.rdd.RDD<org.apache.spark.mllib.regression.LabeledPoint>, org.apache.spark.mllib.tree.configuration.BoostingStrategy)
trainClassifier(RDD<LabeledPoint>, int, Map<Object, Object>, String, int, int) - Static method in class org.apache.spark.mllib.tree.DecisionTree
Method to train a decision tree model for binary or multiclass classification.
trainClassifier(JavaRDD<LabeledPoint>, int, Map<Integer, Integer>, String, int, int) - Static method in class org.apache.spark.mllib.tree.DecisionTree
Java-friendly API for DecisionTree$.trainClassifier(org.apache.spark.rdd.RDD<org.apache.spark.mllib.regression.LabeledPoint>, int, scala.collection.immutable.Map<java.lang.Object, java.lang.Object>, java.lang.String, int, int)
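A minimal Scala sketch of DecisionTree.trainClassifier (sc and the input path are assumptions):

    import org.apache.spark.mllib.tree.DecisionTree
    import org.apache.spark.mllib.util.MLUtils

    // Hypothetical LibSVM-formatted training data.
    val data = MLUtils.loadLibSVMFile(sc, "data/sample_libsvm_data.txt")

    // 2 classes, no categorical features, "gini" impurity, depth 5, 32 bins.
    val model = DecisionTree.trainClassifier(data, 2, Map[Int, Int](), "gini", 5, 32)
    println(model.toDebugString)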
trainClassifier(RDD<LabeledPoint>, Strategy, int, String, int) - Static method in class org.apache.spark.mllib.tree.RandomForest
Method to train a decision tree model for binary or multiclass classification.
trainClassifier(RDD<LabeledPoint>, int, Map<Object, Object>, int, String, String, int, int, int) - Static method in class org.apache.spark.mllib.tree.RandomForest
Method to train a decision tree model for binary or multiclass classification.
trainClassifier(JavaRDD<LabeledPoint>, int, Map<Integer, Integer>, int, String, String, int, int, int) - Static method in class org.apache.spark.mllib.tree.RandomForest
Java-friendly API for RandomForest$.trainClassifier(org.apache.spark.rdd.RDD<org.apache.spark.mllib.regression.LabeledPoint>, org.apache.spark.mllib.tree.configuration.Strategy, int, java.lang.String, int)
trainImplicit(RDD<Rating>, int, int, double, int, double, long) - Static method in class org.apache.spark.mllib.recommendation.ALS
Train a matrix factorization model given an RDD of 'implicit preferences' given by users to some products, in the form of (userID, productID, preference) pairs.
trainImplicit(RDD<Rating>, int, int, double, int, double) - Static method in class org.apache.spark.mllib.recommendation.ALS
Train a matrix factorization model given an RDD of 'implicit preferences' of users for a subset of products.
trainImplicit(RDD<Rating>, int, int, double, double) - Static method in class org.apache.spark.mllib.recommendation.ALS
Train a matrix factorization model given an RDD of 'implicit preferences' of users for a subset of products.
trainImplicit(RDD<Rating>, int, int) - Static method in class org.apache.spark.mllib.recommendation.ALS
Train a matrix factorization model given an RDD of 'implicit preferences' of users for a subset of products.
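As a rough sketch of the five-argument trainImplicit overload (assuming an existing SparkContext sc; the feedback values are made up):

    import org.apache.spark.mllib.recommendation.{ALS, Rating}

    // Implicit feedback: the 'rating' field carries a confidence weight (e.g. view counts).
    val prefs = sc.parallelize(Seq(
      Rating(1, 10, 3.0),
      Rating(1, 20, 1.0),
      Rating(2, 10, 5.0)))

    // rank = 8 latent factors, 10 iterations, lambda = 0.01, alpha = 1.0
    val model = ALS.trainImplicit(prefs, 8, 10, 0.01, 1.0)
    val topTwoForUser1 = model.recommendProducts(1, 2)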
trainingLogLikelihood() - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
Log likelihood of the observed tokens in the training set, given the current parameter estimates: log P(docs | topics, topic distributions for docs, Dirichlet hyperparameters)
trainOn(DStream<Vector>) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Update the clustering model by training on batches of data from a DStream.
trainOn(JavaDStream<Vector>) - Method in class org.apache.spark.mllib.clustering.StreamingKMeans
Java-friendly version of trainOn.
trainOn(DStream<LabeledPoint>) - Method in class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
Update the model by training on batches of data from a DStream.
trainOn(JavaDStream<LabeledPoint>) - Method in class org.apache.spark.mllib.regression.StreamingLinearAlgorithm
Java-friendly version of trainOn.
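For example, a sketch of trainOn with StreamingKMeans (assuming an existing StreamingContext ssc; the HDFS directory is hypothetical):

    import org.apache.spark.mllib.clustering.StreamingKMeans
    import org.apache.spark.mllib.linalg.Vectors

    val trainingData = ssc.textFileStream("hdfs:///streaming/train")
      .map(line => Vectors.dense(line.split(' ').map(_.toDouble)))

    val model = new StreamingKMeans()
      .setK(3)
      .setDecayFactor(1.0)
      .setRandomCenters(dim = 2, weight = 0.0)

    // Cluster centers are updated on every incoming batch.
    model.trainOn(trainingData)
    ssc.start()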
trainRatio() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
trainRatio() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
trainRegressor(RDD<LabeledPoint>, Map<Object, Object>, String, int, int) - Static method in class org.apache.spark.mllib.tree.DecisionTree
Method to train a decision tree model for regression.
trainRegressor(JavaRDD<LabeledPoint>, Map<Integer, Integer>, String, int, int) - Static method in class org.apache.spark.mllib.tree.DecisionTree
Java-friendly API for DecisionTree$.trainRegressor(org.apache.spark.rdd.RDD<org.apache.spark.mllib.regression.LabeledPoint>, scala.collection.immutable.Map<java.lang.Object, java.lang.Object>, java.lang.String, int, int)
trainRegressor(RDD<LabeledPoint>, Strategy, int, String, int) - Static method in class org.apache.spark.mllib.tree.RandomForest
Method to train a decision tree model for regression.
trainRegressor(RDD<LabeledPoint>, Map<Object, Object>, int, String, String, int, int, int) - Static method in class org.apache.spark.mllib.tree.RandomForest
Method to train a decision tree model for regression.
trainRegressor(JavaRDD<LabeledPoint>, Map<Integer, Integer>, int, String, String, int, int, int) - Static method in class org.apache.spark.mllib.tree.RandomForest
Java-friendly API for RandomForest$.trainRegressor(org.apache.spark.rdd.RDD<org.apache.spark.mllib.regression.LabeledPoint>, org.apache.spark.mllib.tree.configuration.Strategy, int, java.lang.String, int)
TrainValidationSplit - Class in org.apache.spark.ml.tuning
:: Experimental :: Validation for hyper-parameter tuning.
TrainValidationSplit(String) - Constructor for class org.apache.spark.ml.tuning.TrainValidationSplit
 
TrainValidationSplit() - Constructor for class org.apache.spark.ml.tuning.TrainValidationSplit
 
TrainValidationSplitModel - Class in org.apache.spark.ml.tuning
:: Experimental :: Model from train validation split.
transform(Dataset<?>) - Method in class org.apache.spark.ml.classification.ClassificationModel
Transforms dataset by reading from featuresCol and appending new columns as specified by parameters: predicted labels as predictionCol of type Double, and raw predictions (confidences) as rawPredictionCol of type Vector.
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.classification.OneVsRestModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
Transforms dataset by reading from featuresCol and appending new columns as specified by parameters: predicted labels as predictionCol of type Double, raw predictions (confidences) as rawPredictionCol of type Vector, and the probability of each class as probabilityCol of type Vector.
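To see the appended columns, a minimal sketch with LogisticRegression (assuming training is a DataFrame with "label" and "features" columns):

    import org.apache.spark.ml.classification.LogisticRegression

    val model = new LogisticRegression().setMaxIter(10).fit(training)

    // transform() appends predictionCol, rawPredictionCol and probabilityCol.
    model.transform(training)
      .select("features", "rawPrediction", "probability", "prediction")
      .show(5)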
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.clustering.KMeansModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.clustering.LDAModel
Transforms the input dataset.
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.Binarizer
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.Bucketizer
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.ColumnPruner
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.DCT
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.feature.DCT
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.feature.DCT
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.feature.DCT
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.HashingTF
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.IDFModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.IndexToString
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.Interaction
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.NGram
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.feature.NGram
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.feature.NGram
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.feature.NGram
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.feature.Normalizer
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.feature.Normalizer
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.feature.Normalizer
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.OneHotEncoder
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.PCAModel
Transform a vector by computed Principal Components.
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.RFormulaModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.SQLTransformer
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.StandardScalerModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.StringIndexerModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.VectorAssembler
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.VectorIndexerModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.VectorSlicer
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.feature.Word2VecModel
Transform a sentence column to a vector column to represent the whole sentence.
transform(Dataset<?>) - Method in class org.apache.spark.ml.PipelineModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.PredictionModel
Transforms dataset by reading from featuresCol, calling predict(), and storing the predictions as a new column predictionCol.
transform(Dataset<?>) - Method in class org.apache.spark.ml.recommendation.ALSModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
transform(Dataset<?>, ParamMap) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
transform(Dataset<?>) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
transform(Dataset<?>, ParamPair<?>, ParamPair<?>...) - Method in class org.apache.spark.ml.Transformer
Transforms the dataset with optional parameters.
transform(Dataset<?>, ParamPair<?>, Seq<ParamPair<?>>) - Method in class org.apache.spark.ml.Transformer
Transforms the dataset with optional parameters.
transform(Dataset<?>, ParamMap) - Method in class org.apache.spark.ml.Transformer
Transforms the dataset with provided parameter map as additional parameters.
transform(Dataset<?>) - Method in class org.apache.spark.ml.Transformer
Transforms the input dataset.
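The optional-parameter overloads take per-call overrides that win over values set on the transformer itself; a sketch (assuming a fitted LogisticRegressionModel model and a DataFrame df):

    import org.apache.spark.ml.param.ParamMap

    // ParamMap form:
    val scored = model.transform(df,
      ParamMap(model.threshold -> 0.7, model.probabilityCol -> "myProb"))

    // Equivalent varargs form:
    val scored2 = model.transform(df,
      model.threshold -> 0.7, model.probabilityCol -> "myProb")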
transform(Dataset<?>) - Method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
transform(Dataset<?>) - Method in class org.apache.spark.ml.UnaryTransformer
 
transform(Vector) - Method in class org.apache.spark.mllib.feature.ChiSqSelectorModel
Applies transformation on a vector.
transform(Vector) - Method in class org.apache.spark.mllib.feature.ElementwiseProduct
Applies the Hadamard (element-wise) product transformation.
transform(Iterable<?>) - Method in class org.apache.spark.mllib.feature.HashingTF
Transforms the input document into a sparse term frequency vector.
transform(Iterable<?>) - Method in class org.apache.spark.mllib.feature.HashingTF
Transforms the input document into a sparse term frequency vector (Java version).
transform(RDD<D>) - Method in class org.apache.spark.mllib.feature.HashingTF
Transforms the input document to term frequency vectors.
transform(JavaRDD<D>) - Method in class org.apache.spark.mllib.feature.HashingTF
Transforms the input document to term frequency vectors (Java version).
transform(RDD<Vector>) - Method in class org.apache.spark.mllib.feature.IDFModel
Transforms term frequency (TF) vectors to TF-IDF vectors.
transform(Vector) - Method in class org.apache.spark.mllib.feature.IDFModel
Transforms a term frequency (TF) vector to a TF-IDF vector.
transform(JavaRDD<Vector>) - Method in class org.apache.spark.mllib.feature.IDFModel
Transforms term frequency (TF) vectors to TF-IDF vectors (Java version).
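These mllib transforms compose into the usual TF-IDF recipe; a sketch (assuming an existing SparkContext sc):

    import org.apache.spark.mllib.feature.{HashingTF, IDF}
    import org.apache.spark.mllib.linalg.Vector
    import org.apache.spark.rdd.RDD

    val documents: RDD[Seq[String]] = sc.parallelize(Seq(
      Seq("spark", "streaming", "rdd"),
      Seq("spark", "sql", "dataset")))

    val tf: RDD[Vector] = new HashingTF().transform(documents)
    tf.cache()                               // reused by both fit() and transform()
    val idfModel = new IDF().fit(tf)
    val tfidf: RDD[Vector] = idfModel.transform(tf)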
transform(Vector) - Method in class org.apache.spark.mllib.feature.Normalizer
Applies unit length normalization on a vector.
transform(Vector) - Method in class org.apache.spark.mllib.feature.PCAModel
Transform a vector by computed Principal Components.
transform(Vector) - Method in class org.apache.spark.mllib.feature.StandardScalerModel
Applies standardization transformation on a vector.
transform(Vector) - Method in interface org.apache.spark.mllib.feature.VectorTransformer
Applies transformation on a vector.
transform(RDD<Vector>) - Method in interface org.apache.spark.mllib.feature.VectorTransformer
Applies transformation on an RDD[Vector].
transform(JavaRDD<Vector>) - Method in interface org.apache.spark.mllib.feature.VectorTransformer
Applies transformation on an JavaRDD[Vector].
transform(String) - Method in class org.apache.spark.mllib.feature.Word2VecModel
Transforms a word into its vector representation.
transform(Function1<Dataset<T>, Dataset<U>>) - Method in class org.apache.spark.sql.Dataset
Concise syntax for chaining custom transformations.
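A sketch of the chaining idiom (assuming a DataFrame df; the helper functions and the "value" column are hypothetical):

    import org.apache.spark.sql.DataFrame
    import org.apache.spark.sql.functions.col

    def withDoubled(d: DataFrame): DataFrame = d.withColumn("doubled", col("value") * 2)
    def withSquared(d: DataFrame): DataFrame = d.withColumn("squared", col("value") * col("value"))

    // Reads left-to-right instead of nesting withSquared(withDoubled(df)).
    val result = df.transform(withDoubled).transform(withSquared)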
transform(PartialFunction<BaseType, BaseType>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
transform(Function<R, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
transform(Function2<R, Time, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
transform(Function<R, JavaRDD<U>>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream.
transform(Function2<R, Time, JavaRDD<U>>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream.
transform(Function<R, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
transform(Function2<R, Time, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
transform(Function<R, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
transform(Function2<R, Time, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
transform(Function<R, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
transform(Function2<R, Time, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
transform(Function<R, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
transform(Function2<R, Time, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
transform(Function<R, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
transform(Function2<R, Time, JavaRDD<U>>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
transform(List<JavaDStream<?>>, Function2<List<JavaRDD<?>>, Time, JavaRDD<T>>) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a new DStream in which each RDD is generated by applying a function on RDDs of the DStreams.
transform(Function1<RDD<T>, RDD<U>>, ClassTag<U>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream.
transform(Function2<RDD<T>, Time, RDD<U>>, ClassTag<U>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream.
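A sketch of the (RDD, Time) variant (assuming a DStream[String] named words and an RDD[String] named spamRDD holding words to drop):

    import org.apache.spark.rdd.RDD
    import org.apache.spark.streaming.Time

    val cleaned = words.transform { (rdd: RDD[String], time: Time) =>
      // Arbitrary RDD-to-RDD code runs once per batch interval.
      rdd.subtract(spamRDD)
    }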
transform(Seq<DStream<?>>, Function2<Seq<RDD<?>>, Time, RDD<T>>, ClassTag<T>) - Method in class org.apache.spark.streaming.StreamingContext
Create a new DStream in which each RDD is generated by applying a function on RDDs of the DStreams.
transformAllExpressions(PartialFunction<Expression, Expression>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
transformChildren(PartialFunction<BaseType, BaseType>, Function2<BaseType, PartialFunction<BaseType, BaseType>, BaseType>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
transformDown(PartialFunction<BaseType, BaseType>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
Transformer - Class in org.apache.spark.ml
:: DeveloperApi :: Abstract class for transformers that transform one dataset into another.
Transformer() - Constructor for class org.apache.spark.ml.Transformer
 
transformExpressions(PartialFunction<Expression, Expression>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
transformExpressionsDown(PartialFunction<Expression, Expression>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
transformExpressionsUp(PartialFunction<Expression, Expression>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
transformImpl(Dataset<?>) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
transformImpl(Dataset<?>) - Method in class org.apache.spark.ml.classification.GBTClassificationModel
 
transformImpl(Dataset<?>) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
transformImpl(Dataset<?>) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
transformImpl(Dataset<?>) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
transformImpl(Dataset<?>) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
transformImpl(Dataset<?>) - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
transformImpl(Dataset<?>) - Method in class org.apache.spark.ml.PredictionModel
 
transformImpl(Dataset<?>) - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
transformImpl(Dataset<?>) - Method in class org.apache.spark.ml.regression.GBTRegressionModel
 
transformImpl(Dataset<?>) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
transformImpl(Dataset<?>) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
transformImpl(Dataset<?>) - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.classification.OneVsRest
 
transformSchema(StructType) - Method in class org.apache.spark.ml.classification.OneVsRestModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
transformSchema(StructType) - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
transformSchema(StructType) - Method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
transformSchema(StructType) - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.clustering.KMeans
 
transformSchema(StructType) - Method in class org.apache.spark.ml.clustering.KMeansModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.clustering.LDA
 
transformSchema(StructType) - Method in class org.apache.spark.ml.clustering.LDAModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.Binarizer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.Bucketizer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.ChiSqSelector
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.ColumnPruner
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.CountVectorizer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.feature.DCT
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.feature.DCT
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.HashingTF
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.IDF
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.IDFModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.IndexToString
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.Interaction
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.MaxAbsScaler
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.MinMaxScaler
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.feature.NGram
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.feature.NGram
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.feature.Normalizer
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.feature.Normalizer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.OneHotEncoder
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.PCA
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.PCAModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.RFormula
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.RFormulaModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.SQLTransformer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.StandardScaler
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.StandardScalerModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.StringIndexer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.StringIndexerModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.feature.Tokenizer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.VectorAssembler
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.VectorIndexer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.VectorIndexerModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.VectorSlicer
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.Word2Vec
 
transformSchema(StructType) - Method in class org.apache.spark.ml.feature.Word2VecModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.Pipeline
 
transformSchema(StructType) - Method in class org.apache.spark.ml.PipelineModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.PipelineStage
:: DeveloperApi ::
transformSchema(StructType, boolean) - Method in class org.apache.spark.ml.PipelineStage
:: DeveloperApi ::
transformSchema(StructType) - Method in class org.apache.spark.ml.PredictionModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.Predictor
 
transformSchema(StructType) - Method in class org.apache.spark.ml.recommendation.ALS
 
transformSchema(StructType) - Method in class org.apache.spark.ml.recommendation.ALSModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
transformSchema(StructType) - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
transformSchema(StructType) - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
transformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
transformSchema(StructType) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
transformSchema(StructType) - Method in class org.apache.spark.ml.tuning.CrossValidator
 
transformSchema(StructType) - Method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
transformSchema(StructType) - Method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
transformSchema(StructType) - Method in class org.apache.spark.ml.UnaryTransformer
 
transformSchemaImpl(StructType) - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
transformSchemaImpl(StructType) - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
transformSchemaImpl(StructType) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
transformSchemaImpl(StructType) - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
transformToPair(Function<R, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
transformToPair(Function2<R, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
transformToPair(Function<R, JavaPairRDD<K2, V2>>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream.
transformToPair(Function2<R, Time, JavaPairRDD<K2, V2>>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream.
transformToPair(Function<R, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
transformToPair(Function2<R, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
transformToPair(Function<R, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
transformToPair(Function2<R, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
transformToPair(Function<R, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
transformToPair(Function2<R, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
transformToPair(Function<R, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
transformToPair(Function2<R, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
transformToPair(Function<R, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
transformToPair(Function2<R, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
transformToPair(List<JavaDStream<?>>, Function2<List<JavaRDD<?>>, Time, JavaPairRDD<K, V>>) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a new DStream in which each RDD is generated by applying a function on RDDs of the DStreams.
transformUp(PartialFunction<BaseType, BaseType>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
transformWith(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
transformWith(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
transformWith(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaRDD<W>>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream and 'other' DStream.
transformWith(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaRDD<W>>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream and 'other' DStream.
transformWith(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
transformWith(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
transformWith(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
transformWith(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
transformWith(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
transformWith(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
transformWith(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
transformWith(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
transformWith(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
transformWith(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaRDD<W>>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
transformWith(DStream<U>, Function2<RDD<T>, RDD<U>, RDD<V>>, ClassTag<U>, ClassTag<V>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream and 'other' DStream.
transformWith(DStream<U>, Function3<RDD<T>, RDD<U>, Time, RDD<V>>, ClassTag<U>, ClassTag<V>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream and 'other' DStream.
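A sketch of transformWith joining two DStreams batch by batch (assuming DStream[(String, Long)] streams clicks and impressions keyed by the same id):

    import org.apache.spark.rdd.RDD

    val joined = clicks.transformWith(impressions,
      (c: RDD[(String, Long)], i: RDD[(String, Long)]) => c.join(i))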
transformWithToPair(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
transformWithToPair(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaPairRDD<K3, V3>>) - Static method in class org.apache.spark.streaming.api.java.JavaDStream
 
transformWithToPair(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaPairRDD<K2, V2>>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream and 'other' DStream.
transformWithToPair(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaPairRDD<K3, V3>>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
Return a new DStream in which each RDD is generated by applying a function on each RDD of 'this' DStream and 'other' DStream.
transformWithToPair(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
transformWithToPair(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaPairRDD<K3, V3>>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
transformWithToPair(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
transformWithToPair(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaPairRDD<K3, V3>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
transformWithToPair(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
transformWithToPair(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaPairRDD<K3, V3>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
transformWithToPair(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
transformWithToPair(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaPairRDD<K3, V3>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
transformWithToPair(JavaDStream<U>, Function3<R, JavaRDD<U>, Time, JavaPairRDD<K2, V2>>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
transformWithToPair(JavaPairDStream<K2, V2>, Function3<R, JavaPairRDD<K2, V2>, Time, JavaPairRDD<K3, V3>>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
translate(Column, String, String) - Static method in class org.apache.spark.sql.functions
Translate any character in the src column that matches a character in matchingString with the corresponding character in replaceString.
transpose() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
transpose() - Method in interface org.apache.spark.ml.linalg.Matrix
Transpose the Matrix.
transpose() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
transpose() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
transpose() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Transpose this BlockMatrix.
transpose() - Method in class org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
Transposes this CoordinateMatrix.
transpose() - Method in interface org.apache.spark.mllib.linalg.Matrix
Transpose the Matrix.
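For the local matrix case, a small sketch:

    import org.apache.spark.mllib.linalg.Matrices

    // 2x3 matrix stored column-major: [[1, 3, 5], [2, 4, 6]]
    val m = Matrices.dense(2, 3, Array(1.0, 2.0, 3.0, 4.0, 5.0, 6.0))
    val t = m.transpose               // 3x2 view; no data is copied
    println((t.numRows, t.numCols))   // (3,2)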
transpose() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
transpose(Function1<A, GenTraversableOnce<B>>) - Static method in class org.apache.spark.sql.types.StructType
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int) - Static method in class org.apache.spark.api.java.JavaRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>) - Static method in class org.apache.spark.api.java.JavaRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int) - Method in interface org.apache.spark.api.java.JavaRDDLike
Aggregates the elements of this RDD in a multi-level tree pattern.
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>) - Method in interface org.apache.spark.api.java.JavaRDDLike
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Aggregates the elements of this RDD in a multi-level tree pattern.
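A sketch computing (sum, count) with a two-level combine tree (assuming an RDD[Double] named rdd):

    val (sum, count) = rdd.treeAggregate((0.0, 0L))(
      (acc, v) => (acc._1 + v, acc._2 + 1L),   // merge a value into a partition's accumulator
      (a, b) => (a._1 + b._1, a._2 + b._2),    // merge two accumulators
      depth = 2)                               // levels of partial aggregation before the driver
    val mean = sum / count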
treeAggregate$default$4(U) - Static method in class org.apache.spark.api.r.RRDD
 
treeAggregate$default$4(U) - Static method in class org.apache.spark.graphx.EdgeRDD
 
treeAggregate$default$4(U) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
treeAggregate$default$4(U) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
treeAggregate$default$4(U) - Static method in class org.apache.spark.graphx.VertexRDD
 
treeAggregate$default$4(U) - Static method in class org.apache.spark.rdd.HadoopRDD
 
treeAggregate$default$4(U) - Static method in class org.apache.spark.rdd.JdbcRDD
 
treeAggregate$default$4(U) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
treeAggregate$default$4(U) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
treeChildren() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
treeID() - Method in class org.apache.spark.ml.tree.EnsembleModelReadWrite.EnsembleNodeData
 
treeId() - Method in class org.apache.spark.mllib.tree.model.DecisionTreeModel.SaveLoadV1_0$.NodeData
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
treeReduce(Function2<T, T, T>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
treeReduce(Function2<T, T, T>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.api.java.JavaRDD
 
treeReduce(Function2<T, T, T>) - Static method in class org.apache.spark.api.java.JavaRDD
 
treeReduce(Function2<T, T, T>, int) - Method in interface org.apache.spark.api.java.JavaRDDLike
Reduces the elements of this RDD in a multi-level tree pattern.
treeReduce(Function2<T, T, T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.api.r.RRDD
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.graphx.EdgeRDD
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.graphx.VertexRDD
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.rdd.HadoopRDD
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.rdd.JdbcRDD
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
treeReduce(Function2<T, T, T>, int) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
treeReduce(Function2<T, T, T>, int) - Method in class org.apache.spark.rdd.RDD
Reduces the elements of this RDD in a multi-level tree pattern.
treeReduce$default$2() - Static method in class org.apache.spark.api.r.RRDD
 
treeReduce$default$2() - Static method in class org.apache.spark.graphx.EdgeRDD
 
treeReduce$default$2() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
treeReduce$default$2() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
treeReduce$default$2() - Static method in class org.apache.spark.graphx.VertexRDD
 
treeReduce$default$2() - Static method in class org.apache.spark.rdd.HadoopRDD
 
treeReduce$default$2() - Static method in class org.apache.spark.rdd.JdbcRDD
 
treeReduce$default$2() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
treeReduce$default$2() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
trees() - Method in class org.apache.spark.ml.classification.GBTClassificationModel
 
trees() - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
trees() - Method in class org.apache.spark.ml.regression.GBTRegressionModel
 
trees() - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
trees() - Method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
trees() - Method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
treeStrategy() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
treeString() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
treeString() - Method in class org.apache.spark.sql.types.StructType
 
treeWeights() - Method in class org.apache.spark.ml.classification.GBTClassificationModel
 
treeWeights() - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
treeWeights() - Method in class org.apache.spark.ml.regression.GBTRegressionModel
 
treeWeights() - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
treeWeights() - Method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
 
treeWeights() - Static method in class org.apache.spark.mllib.tree.model.RandomForestModel
 
triangleCount() - Method in class org.apache.spark.graphx.GraphOps
Compute the number of triangles passing through each vertex.
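A sketch (assuming an existing SparkContext sc; the edge-list path is hypothetical). TriangleCount expects canonically oriented, partitioned edges:

    import org.apache.spark.graphx.{GraphLoader, PartitionStrategy}

    val graph = GraphLoader
      .edgeListFile(sc, "data/followers.txt", canonicalOrientation = true)
      .partitionBy(PartitionStrategy.RandomVertexCut)

    val triangles = graph.triangleCount()      // per-vertex triangle counts
    triangles.vertices.take(5).foreach(println)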
TriangleCount - Class in org.apache.spark.graphx.lib
Compute the number of triangles passing through each vertex.
TriangleCount() - Constructor for class org.apache.spark.graphx.lib.TriangleCount
 
trigger(Trigger) - Method in class org.apache.spark.sql.DataFrameWriter
:: Experimental :: Set the trigger for the stream query.
Trigger - Interface in org.apache.spark.sql
:: Experimental :: Used to indicate how often results should be produced by a ContinuousQuery.
trim(Column) - Static method in class org.apache.spark.sql.functions
Trim the spaces from both ends for the specified string column.
TripletFields - Class in org.apache.spark.graphx
Represents a subset of the fields of an EdgeTriplet or EdgeContext.
TripletFields() - Constructor for class org.apache.spark.graphx.TripletFields
Constructs a default TripletFields in which all fields are included.
TripletFields(boolean, boolean, boolean) - Constructor for class org.apache.spark.graphx.TripletFields
 
triplets() - Method in class org.apache.spark.graphx.Graph
An RDD containing the edge triplets, which are edges along with the vertex data associated with the adjacent vertices.
triplets() - Method in class org.apache.spark.graphx.impl.GraphImpl
Return an RDD that brings edges together with their source and destination vertices.
truePositiveRate(double) - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns the true positive rate for a given label (category).
trunc(Column, String) - Static method in class org.apache.spark.sql.functions
Returns date truncated to the unit specified by the format.
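For illustration, a sketch using translate, trim, and trunc together (assuming a DataFrame df with a string column "name" and a date column "dt"):

    import org.apache.spark.sql.functions.{col, translate, trim, trunc}

    val cleaned = df.select(
      trim(col("name")).as("name_trimmed"),
      translate(col("name"), "-_", "  ").as("name_spaced"),  // '-' -> ' ', '_' -> ' '
      trunc(col("dt"), "month").as("month_start"))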
tryLog(Function0<T>) - Static method in class org.apache.spark.util.Utils
Executes the given block in a Try, logging any uncaught exceptions.
tryLogNonFatalError(Function0<BoxedUnit>) - Static method in class org.apache.spark.util.Utils
Executes the given block.
tryOrExit(Function0<BoxedUnit>) - Static method in class org.apache.spark.util.Utils
Execute a block of code that evaluates to Unit, forwarding any uncaught exceptions to the default UncaughtExceptionHandler
tryOrIOException(Function0<T>) - Static method in class org.apache.spark.util.Utils
Execute a block of code that returns a value, re-throwing any non-fatal uncaught exceptions as IOException.
tryOrStopSparkContext(SparkContext, Function0<BoxedUnit>) - Static method in class org.apache.spark.util.Utils
Execute a block of code that evaluates to Unit, stopping the SparkContext if there is any uncaught exception
tryRecoverFromCheckpoint(String) - Method in class org.apache.spark.streaming.StreamingContextPythonHelper
This is a private method only for Python to implement getOrCreate.
tryWithResource(Function0<R>, Function1<R, T>) - Static method in class org.apache.spark.util.Utils
 
tryWithSafeFinally(Function0<T>, Function0<BoxedUnit>) - Static method in class org.apache.spark.util.Utils
Execute a block of code, then a finally block, but if exceptions happen in the finally block, do not suppress the original exception.
tryWithSafeFinallyAndFailureCallbacks(Function0<T>, Function0<BoxedUnit>, Function0<BoxedUnit>) - Static method in class org.apache.spark.util.Utils
Execute a block of code and call the failure callbacks in the catch block.
tuple(Encoder<T1>, Encoder<T2>) - Static method in class org.apache.spark.sql.Encoders
An encoder for 2-ary tuples.
tuple(Encoder<T1>, Encoder<T2>, Encoder<T3>) - Static method in class org.apache.spark.sql.Encoders
An encoder for 3-ary tuples.
tuple(Encoder<T1>, Encoder<T2>, Encoder<T3>, Encoder<T4>) - Static method in class org.apache.spark.sql.Encoders
An encoder for 4-ary tuples.
tuple(Encoder<T1>, Encoder<T2>, Encoder<T3>, Encoder<T4>, Encoder<T5>) - Static method in class org.apache.spark.sql.Encoders
An encoder for 5-ary tuples.
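 
A small Scala sketch of Encoders.tuple, combining two primitive encoders into a 2-ary tuple encoder; it assumes a SparkSession named spark:

    import org.apache.spark.sql.{Encoder, Encoders}

    // Build an explicit encoder for (String, Long) pairs from the built-in primitive encoders.
    val pairEncoder: Encoder[(String, Long)] = Encoders.tuple(Encoders.STRING, Encoders.scalaLong)
    val ds = spark.createDataset(Seq(("a", 1L), ("b", 2L)))(pairEncoder)
 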
tValues() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionTrainingSummary
T-statistic of estimated coefficients and intercept.
tValues() - Method in class org.apache.spark.ml.regression.LinearRegressionSummary
T-statistic of estimated coefficients and intercept.
TYPE() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
typed - Class in org.apache.spark.sql.expressions.javalang
:: Experimental :: Type-safe functions available for Dataset operations in Java.
typed() - Constructor for class org.apache.spark.sql.expressions.javalang.typed
 
typed - Class in org.apache.spark.sql.expressions.scalalang
:: Experimental :: Type-safe functions available for Dataset operations in Scala.
typed() - Constructor for class org.apache.spark.sql.expressions.scalalang.typed
 
TypedColumn<T,U> - Class in org.apache.spark.sql
A Column where an Encoder has been given for the expected input and return type.
TypedColumn(Expression, ExpressionEncoder<U>) - Constructor for class org.apache.spark.sql.TypedColumn
 
typeInfoConversions(DataType) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
typeName() - Method in class org.apache.spark.mllib.linalg.VectorUDT
 
typeName() - Static method in class org.apache.spark.sql.types.ArrayType
 
typeName() - Static method in class org.apache.spark.sql.types.BinaryType
 
typeName() - Static method in class org.apache.spark.sql.types.BooleanType
 
typeName() - Static method in class org.apache.spark.sql.types.ByteType
 
typeName() - Static method in class org.apache.spark.sql.types.CalendarIntervalType
 
typeName() - Method in class org.apache.spark.sql.types.DataType
Name of the type used in JSON serialization.
typeName() - Static method in class org.apache.spark.sql.types.DateType
 
typeName() - Method in class org.apache.spark.sql.types.DecimalType
 
typeName() - Static method in class org.apache.spark.sql.types.DoubleType
 
typeName() - Static method in class org.apache.spark.sql.types.FloatType
 
typeName() - Static method in class org.apache.spark.sql.types.IntegerType
 
typeName() - Static method in class org.apache.spark.sql.types.LongType
 
typeName() - Static method in class org.apache.spark.sql.types.MapType
 
typeName() - Static method in class org.apache.spark.sql.types.NullType
 
typeName() - Static method in class org.apache.spark.sql.types.NumericType
 
typeName() - Static method in class org.apache.spark.sql.types.ShortType
 
typeName() - Static method in class org.apache.spark.sql.types.StringType
 
typeName() - Static method in class org.apache.spark.sql.types.StructType
 
typeName() - Static method in class org.apache.spark.sql.types.TimestampType
 

U

U() - Method in class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
udf(Function0<RT>, TypeTags.TypeTag<RT>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 0 arguments as user-defined function (UDF).
udf(Function1<A1, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 1 argument as user-defined function (UDF).
udf(Function2<A1, A2, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 2 arguments as user-defined function (UDF).
udf(Function3<A1, A2, A3, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 3 arguments as user-defined function (UDF).
udf(Function4<A1, A2, A3, A4, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 4 arguments as user-defined function (UDF).
udf(Function5<A1, A2, A3, A4, A5, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 5 arguments as user-defined function (UDF).
udf(Function6<A1, A2, A3, A4, A5, A6, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 6 arguments as user-defined function (UDF).
udf(Function7<A1, A2, A3, A4, A5, A6, A7, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 7 arguments as user-defined function (UDF).
udf(Function8<A1, A2, A3, A4, A5, A6, A7, A8, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 8 arguments as user-defined function (UDF).
udf(Function9<A1, A2, A3, A4, A5, A6, A7, A8, A9, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 9 arguments as user-defined function (UDF).
udf(Function10<A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, RT>, TypeTags.TypeTag<RT>, TypeTags.TypeTag<A1>, TypeTags.TypeTag<A2>, TypeTags.TypeTag<A3>, TypeTags.TypeTag<A4>, TypeTags.TypeTag<A5>, TypeTags.TypeTag<A6>, TypeTags.TypeTag<A7>, TypeTags.TypeTag<A8>, TypeTags.TypeTag<A9>, TypeTags.TypeTag<A10>) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function of 10 arguments as user-defined function (UDF).
udf(Object, DataType) - Static method in class org.apache.spark.sql.functions
Defines a user-defined function (UDF) using a Scala closure.
udf() - Method in class org.apache.spark.sql.SparkSession
A collection of methods for registering user-defined functions (UDF).
udf() - Method in class org.apache.spark.sql.SQLContext
A collection of methods for registering user-defined functions (UDF).
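 
A brief Scala sketch of both flavours: functions.udf builds a UDF from a closure, and SparkSession.udf registers one by name for use in SQL text. The DataFrame df, column x and table my_table are assumptions for illustration:

    import org.apache.spark.sql.functions.{col, udf}

    // Wrap a Scala closure; argument and return types are inferred from the closure's signature.
    val squared = udf((x: Int) => x * x)
    val withSquares = df.withColumn("x_squared", squared(col("x")))

    // Register the same function by name so it can be used inside SQL statements.
    spark.udf.register("squared", (x: Int) => x * x)
    spark.sql("SELECT squared(x) FROM my_table")
 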
UDF1<T1,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 1 argument.
UDF10<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 10 arguments.
UDF11<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 11 arguments.
UDF12<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 12 arguments.
UDF13<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 13 arguments.
UDF14<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 14 arguments.
UDF15<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 15 arguments.
UDF16<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 16 arguments.
UDF17<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 17 arguments.
UDF18<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 18 arguments.
UDF19<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 19 arguments.
UDF2<T1,T2,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 2 arguments.
UDF20<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 20 arguments.
UDF21<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,T21,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 21 arguments.
UDF22<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19,T20,T21,T22,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 22 arguments.
UDF3<T1,T2,T3,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 3 arguments.
UDF4<T1,T2,T3,T4,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 4 arguments.
UDF5<T1,T2,T3,T4,T5,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 5 arguments.
UDF6<T1,T2,T3,T4,T5,T6,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 6 arguments.
UDF7<T1,T2,T3,T4,T5,T6,T7,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 7 arguments.
UDF8<T1,T2,T3,T4,T5,T6,T7,T8,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 8 arguments.
UDF9<T1,T2,T3,T4,T5,T6,T7,T8,T9,R> - Interface in org.apache.spark.sql.api.java
A Spark SQL UDF that has 9 arguments.
UDFRegistration - Class in org.apache.spark.sql
Functions for registering user-defined functions.
UDTRegistration - Class in org.apache.spark.sql.types
This object keeps the mappings between user classes and their User Defined Types (UDTs).
UDTRegistration() - Constructor for class org.apache.spark.sql.types.UDTRegistration
 
uid() - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
uid() - Method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
uid() - Method in class org.apache.spark.ml.classification.GBTClassificationModel
 
uid() - Method in class org.apache.spark.ml.classification.GBTClassifier
 
uid() - Method in class org.apache.spark.ml.classification.LogisticRegression
 
uid() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
uid() - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
uid() - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
uid() - Method in class org.apache.spark.ml.classification.NaiveBayes
 
uid() - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
uid() - Method in class org.apache.spark.ml.classification.OneVsRest
 
uid() - Method in class org.apache.spark.ml.classification.OneVsRestModel
 
uid() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
uid() - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
uid() - Method in class org.apache.spark.ml.classification.RandomForestClassifier
 
uid() - Method in class org.apache.spark.ml.clustering.BisectingKMeans
 
uid() - Method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
uid() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
uid() - Method in class org.apache.spark.ml.clustering.GaussianMixture
 
uid() - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
uid() - Method in class org.apache.spark.ml.clustering.KMeans
 
uid() - Method in class org.apache.spark.ml.clustering.KMeansModel
 
uid() - Method in class org.apache.spark.ml.clustering.LDA
 
uid() - Method in class org.apache.spark.ml.clustering.LDAModel
 
uid() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
uid() - Method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
uid() - Method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
uid() - Method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
uid() - Method in class org.apache.spark.ml.feature.Binarizer
 
uid() - Method in class org.apache.spark.ml.feature.Bucketizer
 
uid() - Method in class org.apache.spark.ml.feature.ChiSqSelector
 
uid() - Method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
uid() - Method in class org.apache.spark.ml.feature.ColumnPruner
 
uid() - Method in class org.apache.spark.ml.feature.CountVectorizer
 
uid() - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
uid() - Method in class org.apache.spark.ml.feature.DCT
 
uid() - Method in class org.apache.spark.ml.feature.ElementwiseProduct
 
uid() - Method in class org.apache.spark.ml.feature.HashingTF
 
uid() - Method in class org.apache.spark.ml.feature.IDF
 
uid() - Method in class org.apache.spark.ml.feature.IDFModel
 
uid() - Method in class org.apache.spark.ml.feature.IndexToString
 
uid() - Method in class org.apache.spark.ml.feature.Interaction
 
uid() - Method in class org.apache.spark.ml.feature.MaxAbsScaler
 
uid() - Method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
uid() - Method in class org.apache.spark.ml.feature.MinMaxScaler
 
uid() - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
uid() - Method in class org.apache.spark.ml.feature.NGram
 
uid() - Method in class org.apache.spark.ml.feature.Normalizer
 
uid() - Method in class org.apache.spark.ml.feature.OneHotEncoder
 
uid() - Method in class org.apache.spark.ml.feature.PCA
 
uid() - Method in class org.apache.spark.ml.feature.PCAModel
 
uid() - Method in class org.apache.spark.ml.feature.PolynomialExpansion
 
uid() - Method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
uid() - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
uid() - Method in class org.apache.spark.ml.feature.RFormula
 
uid() - Method in class org.apache.spark.ml.feature.RFormulaModel
 
uid() - Method in class org.apache.spark.ml.feature.SQLTransformer
 
uid() - Method in class org.apache.spark.ml.feature.StandardScaler
 
uid() - Method in class org.apache.spark.ml.feature.StandardScalerModel
 
uid() - Method in class org.apache.spark.ml.feature.StopWordsRemover
 
uid() - Method in class org.apache.spark.ml.feature.StringIndexer
 
uid() - Method in class org.apache.spark.ml.feature.StringIndexerModel
 
uid() - Method in class org.apache.spark.ml.feature.Tokenizer
 
uid() - Method in class org.apache.spark.ml.feature.VectorAssembler
 
uid() - Method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
uid() - Method in class org.apache.spark.ml.feature.VectorIndexer
 
uid() - Method in class org.apache.spark.ml.feature.VectorIndexerModel
 
uid() - Method in class org.apache.spark.ml.feature.VectorSlicer
 
uid() - Method in class org.apache.spark.ml.feature.Word2Vec
 
uid() - Method in class org.apache.spark.ml.feature.Word2VecModel
 
uid() - Method in class org.apache.spark.ml.Pipeline
 
uid() - Method in class org.apache.spark.ml.PipelineModel
 
uid() - Method in class org.apache.spark.ml.recommendation.ALS
 
uid() - Method in class org.apache.spark.ml.recommendation.ALSModel
 
uid() - Method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
uid() - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
uid() - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
uid() - Method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
uid() - Method in class org.apache.spark.ml.regression.GBTRegressionModel
 
uid() - Method in class org.apache.spark.ml.regression.GBTRegressor
 
uid() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
uid() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
uid() - Method in class org.apache.spark.ml.regression.IsotonicRegression
 
uid() - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
uid() - Method in class org.apache.spark.ml.regression.LinearRegression
 
uid() - Method in class org.apache.spark.ml.regression.LinearRegressionModel
 
uid() - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
uid() - Method in class org.apache.spark.ml.regression.RandomForestRegressor
 
uid() - Method in class org.apache.spark.ml.tuning.CrossValidator
 
uid() - Method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
uid() - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
uid() - Method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
uid() - Method in interface org.apache.spark.ml.util.Identifiable
An immutable unique ID for the object and its derivatives.
UIData - Class in org.apache.spark.ui.jobs
 
UIData() - Constructor for class org.apache.spark.ui.jobs.UIData
 
UIData.ExecutorSummary - Class in org.apache.spark.ui.jobs
 
UIData.ExecutorSummary() - Constructor for class org.apache.spark.ui.jobs.UIData.ExecutorSummary
 
UIData.ExecutorUIData - Class in org.apache.spark.ui.jobs
 
UIData.ExecutorUIData(long, Option<Object>, Option<String>) - Constructor for class org.apache.spark.ui.jobs.UIData.ExecutorUIData
 
UIData.ExecutorUIData$ - Class in org.apache.spark.ui.jobs
 
UIData.ExecutorUIData$() - Constructor for class org.apache.spark.ui.jobs.UIData.ExecutorUIData$
 
UIData.JobUIData - Class in org.apache.spark.ui.jobs
 
UIData.JobUIData(int, Option<Object>, Option<Object>, Seq<Object>, Option<String>, JobExecutionStatus, int, int, int, int, int, int, HashSet<Object>, int, int) - Constructor for class org.apache.spark.ui.jobs.UIData.JobUIData
 
UIData.JobUIData$ - Class in org.apache.spark.ui.jobs
 
UIData.JobUIData$() - Constructor for class org.apache.spark.ui.jobs.UIData.JobUIData$
 
UIData.StageUIData - Class in org.apache.spark.ui.jobs
 
UIData.StageUIData() - Constructor for class org.apache.spark.ui.jobs.UIData.StageUIData
 
UIData.TaskUIData - Class in org.apache.spark.ui.jobs
These are kept mutable and reused throughout a task's lifetime to avoid excessive reallocation.
UIData.TaskUIData(TaskInfo, Option<TaskMetrics>, Option<String>) - Constructor for class org.apache.spark.ui.jobs.UIData.TaskUIData
 
UIData.TaskUIData$ - Class in org.apache.spark.ui.jobs
 
UIData.TaskUIData$() - Constructor for class org.apache.spark.ui.jobs.UIData.TaskUIData$
 
uiRoot() - Static method in class org.apache.spark.ui.UIUtils
 
UIUtils - Class in org.apache.spark.streaming.ui
 
UIUtils() - Constructor for class org.apache.spark.streaming.ui.UIUtils
 
UIUtils - Class in org.apache.spark.ui
Utility functions for generating XML pages with spark content.
UIUtils() - Constructor for class org.apache.spark.ui.UIUtils
 
uiWebUrl() - Method in class org.apache.spark.SparkContext
 
UIWorkloadGenerator - Class in org.apache.spark.ui
Continuously generates jobs that expose various features of the WebUI (internal testing tool).
UIWorkloadGenerator() - Constructor for class org.apache.spark.ui.UIWorkloadGenerator
 
unapply(EdgeContext<VD, ED, A>) - Static method in class org.apache.spark.graphx.EdgeContext
Extractor mainly used for Graph#aggregateMessages*.
unapply(DenseVector) - Static method in class org.apache.spark.ml.linalg.DenseVector
Extracts the value array from a dense vector.
unapply(SparseVector) - Static method in class org.apache.spark.ml.linalg.SparseVector
 
unapply(DenseVector) - Static method in class org.apache.spark.mllib.linalg.DenseVector
Extracts the value array from a dense vector.
unapply(SparseVector) - Static method in class org.apache.spark.mllib.linalg.SparseVector
 
unapply(Column) - Static method in class org.apache.spark.sql.Column
 
unapply(Expression) - Method in class org.apache.spark.sql.types.DecimalType.Expression$
 
unapply(DecimalType) - Method in class org.apache.spark.sql.types.DecimalType.Fixed$
 
unapply(DataType) - Static method in class org.apache.spark.sql.types.DecimalType
 
unapply(Expression) - Static method in class org.apache.spark.sql.types.DecimalType
 
unapply(Expression) - Static method in class org.apache.spark.sql.types.NumericType
Enables matching against NumericType for expressions:
unapply(Broker) - Static method in class org.apache.spark.streaming.kafka.Broker
 
unapply(Throwable) - Static method in class org.apache.spark.util.CausedBy
 
unapply(String) - Static method in class org.apache.spark.util.IntParam
 
unapply(String) - Static method in class org.apache.spark.util.MemoryParam
 
UnaryTransformer<IN,OUT,T extends UnaryTransformer<IN,OUT,T>> - Class in org.apache.spark.ml
:: DeveloperApi :: Abstract class for transformers that take one input column, apply a transformation, and output the result as a new column.
UnaryTransformer() - Constructor for class org.apache.spark.ml.UnaryTransformer
 
unbase64(Column) - Static method in class org.apache.spark.sql.functions
Decodes a BASE64 encoded string column and returns it as a binary column.
uncacheTable(String) - Method in class org.apache.spark.sql.catalog.Catalog
Removes the specified table from the in-memory cache.
uncacheTable(String) - Method in class org.apache.spark.sql.internal.CatalogImpl
Removes the specified table from the in-memory cache.
uncacheTable(String) - Method in class org.apache.spark.sql.SQLContext
Removes the specified table from the in-memory cache.
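 
A minimal Scala sketch of caching and uncaching a table through the Catalog, assuming a SparkSession named spark and a temporary view called "events":

    // Cache the view, force materialization with an action, then release the cached blocks.
    spark.catalog.cacheTable("events")
    spark.table("events").count()
    spark.catalog.uncacheTable("events")
 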
UNCAUGHT_EXCEPTION() - Static method in class org.apache.spark.util.SparkExitCode
The default uncaught exception handler was reached.
UNCAUGHT_EXCEPTION_TWICE() - Static method in class org.apache.spark.util.SparkExitCode
The default uncaught exception handler was called and an exception was encountered while logging the exception.
uncaughtException(Thread, Throwable) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
uncaughtException(Throwable) - Static method in class org.apache.spark.util.SparkUncaughtExceptionHandler
 
underlyingSplit() - Method in class org.apache.spark.scheduler.SplitInfo
 
unhandledFilters(Filter[]) - Method in class org.apache.spark.sql.sources.BaseRelation
Returns the list of Filters that this datasource may not be able to handle.
unhex(Column) - Static method in class org.apache.spark.sql.functions
Inverse of hex.
UniformGenerator - Class in org.apache.spark.mllib.random
:: DeveloperApi :: Generates i.i.d. samples from the uniform distribution U(0.0, 1.0).
UniformGenerator() - Constructor for class org.apache.spark.mllib.random.UniformGenerator
 
uniformJavaRDD(JavaSparkContext, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
uniformJavaRDD(JavaSparkContext, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
uniformJavaRDD(JavaSparkContext, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
RandomRDDs.uniformJavaRDD(org.apache.spark.api.java.JavaSparkContext, long, int, long) with the default number of partitions and the default seed.
uniformJavaVectorRDD(JavaSparkContext, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
uniformJavaVectorRDD(JavaSparkContext, long, int, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
uniformJavaVectorRDD(JavaSparkContext, long, int) - Static method in class org.apache.spark.mllib.random.RandomRDDs
uniformRDD(SparkContext, long, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD comprised of i.i.d. samples from the uniform distribution U(0.0, 1.0).
uniformVectorRDD(SparkContext, long, int, int, long) - Static method in class org.apache.spark.mllib.random.RandomRDDs
Generates an RDD[Vector] with vectors containing i.i.d. samples drawn from the uniform distribution U(0.0, 1.0).
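 
A short Scala sketch of RandomRDDs.uniformRDD, assuming an existing SparkContext sc; the size, partition count and seed are arbitrary illustration values:

    import org.apache.spark.mllib.random.RandomRDDs

    // One million i.i.d. samples from U(0.0, 1.0) in 10 partitions, with a fixed seed.
    val u = RandomRDDs.uniformRDD(sc, 1000000L, 10, seed = 42L)
    // Shift the samples to U(1.0, 2.0).
    val v = u.map(x => 1.0 + x)
 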
union(JavaDoubleRDD) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Return the union of this RDD and another one.
union(JavaPairRDD<K, V>) - Method in class org.apache.spark.api.java.JavaPairRDD
Return the union of this RDD and another one.
union(JavaRDD<T>) - Method in class org.apache.spark.api.java.JavaRDD
Return the union of this RDD and another one.
union(JavaRDD<T>, List<JavaRDD<T>>) - Method in class org.apache.spark.api.java.JavaSparkContext
Build the union of two or more RDDs.
union(JavaPairRDD<K, V>, List<JavaPairRDD<K, V>>) - Method in class org.apache.spark.api.java.JavaSparkContext
Build the union of two or more RDDs.
union(JavaDoubleRDD, List<JavaDoubleRDD>) - Method in class org.apache.spark.api.java.JavaSparkContext
Build the union of two or more RDDs.
union(RDD<T>) - Static method in class org.apache.spark.api.r.RRDD
 
union(RDD<T>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
union(RDD<T>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
union(RDD<T>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
union(RDD<T>) - Static method in class org.apache.spark.graphx.VertexRDD
 
union(RDD<T>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
union(RDD<T>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
union(RDD<T>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
union(RDD<T>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
union(RDD<T>) - Method in class org.apache.spark.rdd.RDD
Return the union of this RDD and another one.
union(Seq<RDD<T>>, ClassTag<T>) - Method in class org.apache.spark.SparkContext
Build the union of a list of RDDs.
union(RDD<T>, Seq<RDD<T>>, ClassTag<T>) - Method in class org.apache.spark.SparkContext
Build the union of a list of RDDs passed as variable-length arguments.
union(Dataset<T>) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset containing union of rows in this Dataset and another Dataset.
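 
A small Scala sketch of Dataset.union, which resolves columns by position and keeps duplicates (SQL UNION ALL semantics); it assumes a SparkSession named spark:

    import spark.implicits._

    val ds1 = Seq(1, 2, 3).toDS()
    val ds2 = Seq(3, 4, 5).toDS()
    val all = ds1.union(ds2)               // keeps duplicates, like UNION ALL
    val dedup = ds1.union(ds2).distinct()  // add distinct() for UNION semantics
 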
union(GenSeq<B>, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
union(JavaDStream<T>) - Method in class org.apache.spark.streaming.api.java.JavaDStream
Return a new DStream by unifying data of another DStream with this DStream.
union(JavaDStream<T>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
union(JavaPairDStream<K, V>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream by unifying data of another DStream with this DStream.
union(JavaPairDStream<K, V>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
union(JavaPairDStream<K, V>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
union(JavaDStream<T>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
union(JavaDStream<T>, List<JavaDStream<T>>) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a unified DStream from multiple DStreams of the same type and same slide duration.
union(JavaPairDStream<K, V>, List<JavaPairDStream<K, V>>) - Method in class org.apache.spark.streaming.api.java.JavaStreamingContext
Create a unified DStream from multiple DStreams of the same type and same slide duration.
union(DStream<T>) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream by unifying data of another DStream with this DStream.
union(Seq<DStream<T>>, ClassTag<T>) - Method in class org.apache.spark.streaming.StreamingContext
Create a unified DStream from multiple DStreams of the same type and same slide duration.
unionAll(Dataset<T>) - Method in class org.apache.spark.sql.Dataset
Deprecated. Use union(). Since 2.0.0.
UnionRDD<T> - Class in org.apache.spark.rdd
 
UnionRDD(SparkContext, Seq<RDD<T>>, ClassTag<T>) - Constructor for class org.apache.spark.rdd.UnionRDD
 
uniqueId() - Method in class org.apache.spark.storage.StreamBlockId
 
unix_timestamp() - Static method in class org.apache.spark.sql.functions
Gets current Unix timestamp in seconds.
unix_timestamp(Column) - Static method in class org.apache.spark.sql.functions
Converts a time string in format yyyy-MM-dd HH:mm:ss to a Unix timestamp (in seconds), using the default timezone and the default locale; returns null if parsing fails.
unix_timestamp(Column, String) - Static method in class org.apache.spark.sql.functions
Converts a time string with the given pattern (see http://docs.oracle.com/javase/tutorial/i18n/format/simpleDateFormat.html) to a Unix timestamp (in seconds); returns null if parsing fails.
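 
A brief Scala sketch of the unix_timestamp overloads, assuming a SparkSession named spark and a DataFrame logs with a string column "event_time" in a hypothetical yyyy/MM/dd HH:mm:ss format:

    import org.apache.spark.sql.functions.{col, unix_timestamp}

    // Parse with an explicit pattern; rows that fail to parse become null.
    val withEpoch = logs.withColumn("epoch",
      unix_timestamp(col("event_time"), "yyyy/MM/dd HH:mm:ss"))

    // No-argument form: the current time in seconds.
    val now = spark.range(1).select(unix_timestamp().as("now_seconds"))
 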
UnknownReason - Class in org.apache.spark
:: DeveloperApi :: We don't know why the task ended -- for example, because of a ClassNotFound exception when deserializing the task result.
UnknownReason() - Constructor for class org.apache.spark.UnknownReason
 
UNLIMITED_DECIMAL_PRECISION() - Static method in class org.apache.spark.sql.hive.HiveShim
 
UNLIMITED_DECIMAL_SCALE() - Static method in class org.apache.spark.sql.hive.HiveShim
 
unlink(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.CLogLog$
 
unlink(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Identity$
 
unlink(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Inverse$
 
unlink(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Log$
 
unlink(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Logit$
 
unlink(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Probit$
 
unlink(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Sqrt$
 
unpersist() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Mark the RDD as non-persistent, and remove all blocks for it from memory and disk.
unpersist(boolean) - Method in class org.apache.spark.api.java.JavaDoubleRDD
Mark the RDD as non-persistent, and remove all blocks for it from memory and disk.
unpersist() - Method in class org.apache.spark.api.java.JavaPairRDD
Mark the RDD as non-persistent, and remove all blocks for it from memory and disk.
unpersist(boolean) - Method in class org.apache.spark.api.java.JavaPairRDD
Mark the RDD as non-persistent, and remove all blocks for it from memory and disk.
unpersist() - Method in class org.apache.spark.api.java.JavaRDD
Mark the RDD as non-persistent, and remove all blocks for it from memory and disk.
unpersist(boolean) - Method in class org.apache.spark.api.java.JavaRDD
Mark the RDD as non-persistent, and remove all blocks for it from memory and disk.
unpersist(boolean) - Static method in class org.apache.spark.api.r.RRDD
 
unpersist() - Method in class org.apache.spark.broadcast.Broadcast
Asynchronously delete cached copies of this broadcast on the executors.
unpersist(boolean) - Method in class org.apache.spark.broadcast.Broadcast
Delete cached copies of this broadcast on the executors.
unpersist(boolean) - Static method in class org.apache.spark.graphx.EdgeRDD
 
unpersist(boolean) - Method in class org.apache.spark.graphx.Graph
Uncaches both vertices and edges of this graph.
unpersist(boolean) - Method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
unpersist(boolean) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
unpersist(boolean) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
unpersist(boolean) - Static method in class org.apache.spark.graphx.VertexRDD
 
unpersist() - Method in class org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
Unpersist intermediate RDDs used in the computation.
unpersist(boolean) - Static method in class org.apache.spark.rdd.HadoopRDD
 
unpersist(boolean) - Static method in class org.apache.spark.rdd.JdbcRDD
 
unpersist(boolean) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
unpersist(boolean) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
unpersist(boolean) - Method in class org.apache.spark.rdd.RDD
Mark the RDD as non-persistent, and remove all blocks for it from memory and disk.
unpersist(boolean) - Method in class org.apache.spark.sql.Dataset
Mark the Dataset as non-persistent, and remove all blocks for it from memory and disk.
unpersist() - Method in class org.apache.spark.sql.Dataset
Mark the Dataset as non-persistent, and remove all blocks for it from memory and disk.
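 
A minimal Scala sketch of the persist/unpersist lifecycle on a Dataset, assuming an existing DataFrame df:

    import org.apache.spark.storage.StorageLevel

    val cached = df.persist(StorageLevel.MEMORY_AND_DISK)
    cached.count()                        // an action materializes the cache
    // ... reuse `cached` across several queries ...
    cached.unpersist(blocking = false)    // asynchronously drop the cached blocks
 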
unpersist$default$1() - Static method in class org.apache.spark.api.r.RRDD
 
unpersist$default$1() - Static method in class org.apache.spark.graphx.EdgeRDD
 
unpersist$default$1() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
unpersist$default$1() - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
unpersist$default$1() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
unpersist$default$1() - Static method in class org.apache.spark.graphx.VertexRDD
 
unpersist$default$1() - Static method in class org.apache.spark.rdd.HadoopRDD
 
unpersist$default$1() - Static method in class org.apache.spark.rdd.JdbcRDD
 
unpersist$default$1() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
unpersist$default$1() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
unpersistRDDFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
unpersistRDDToJson(SparkListenerUnpersistRDD) - Static method in class org.apache.spark.util.JsonProtocol
 
unpersistVertices(boolean) - Method in class org.apache.spark.graphx.Graph
Uncaches only the vertices of this graph, leaving the edges alone.
unpersistVertices(boolean) - Method in class org.apache.spark.graphx.impl.GraphImpl
 
unpersistVertices$default$1() - Static method in class org.apache.spark.graphx.impl.GraphImpl
 
unregister(QueryExecutionListener) - Method in class org.apache.spark.sql.util.ExecutionListenerManager
Unregisters the specified QueryExecutionListener.
unregisterDialect(JdbcDialect) - Static method in class org.apache.spark.sql.jdbc.JdbcDialects
Unregister a dialect.
Unresolved() - Static method in class org.apache.spark.ml.attribute.AttributeType
Unresolved type.
UnresolvedAttribute - Class in org.apache.spark.ml.attribute
:: DeveloperApi :: An unresolved attribute.
UnresolvedAttribute() - Constructor for class org.apache.spark.ml.attribute.UnresolvedAttribute
 
unset(String) - Method in class org.apache.spark.sql.RuntimeConfig
Resets the configuration property for the given key.
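 
A short Scala sketch of setting and unsetting a runtime configuration key, assuming a SparkSession named spark (spark.conf is its RuntimeConfig):

    spark.conf.set("spark.sql.shuffle.partitions", "64")
    println(spark.conf.get("spark.sql.shuffle.partitions"))  // "64"
    spark.conf.unset("spark.sql.shuffle.partitions")         // reverts to the default value
 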
unset() - Static method in class org.apache.spark.TaskContext
Unset the thread local TaskContext.
until(Time, Duration) - Method in class org.apache.spark.streaming.Time
 
untilOffset() - Method in class org.apache.spark.streaming.kafka.OffsetRange
 
unwrap(Object, ObjectInspector) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
unwrapOrcStructs(Configuration, StructType, Option<StructObjectInspector>, Iterator<Writable>) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
unwrapperFor(StructField) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
unzip(Function1<A, Tuple2<A1, A2>>) - Static method in class org.apache.spark.sql.types.StructType
 
unzip3(Function1<A, Tuple3<A1, A2, A3>>) - Static method in class org.apache.spark.sql.types.StructType
 
update(int, int, double) - Method in interface org.apache.spark.ml.linalg.Matrix
Update element at (i, j)
update(Function1<Object, Object>) - Method in interface org.apache.spark.ml.linalg.Matrix
Update all the values of this matrix using the function f.
update(RDD<Vector>, double, String) - Method in class org.apache.spark.mllib.clustering.StreamingKMeansModel
Perform a k-means update on a batch of data.
update(int, int, double) - Method in interface org.apache.spark.mllib.linalg.Matrix
Update element at (i, j)
update(Function1<Object, Object>) - Method in interface org.apache.spark.mllib.linalg.Matrix
Update all the values of this matrix using the function f.
update() - Method in class org.apache.spark.scheduler.AccumulableInfo
 
update(int, Object) - Method in class org.apache.spark.sql.expressions.MutableAggregationBuffer
Update the ith value of this buffer.
update(MutableAggregationBuffer, Row) - Method in class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
Updates the given aggregation buffer buffer with new input data from input.
update() - Method in class org.apache.spark.status.api.v1.AccumulableInfo
 
update(S) - Method in class org.apache.spark.streaming.State
Update the state with a new value.
update(T1, T2) - Method in class org.apache.spark.util.MutablePair
Updates this pair with new values and returns itself.
updateAggregateMetrics(UIData.StageUIData, String, TaskMetrics, Option<TaskMetrics>) - Method in class org.apache.spark.ui.jobs.JobProgressListener
Upon receiving new metrics for a task, updates the per-stage and per-executor-per-stage aggregate metrics by calculating deltas between the currently recorded metrics and the new metrics.
updated(int, B, CanBuildFrom<Repr, B, That>) - Static method in class org.apache.spark.sql.types.StructType
 
UPDATED_BLOCK_STATUSES() - Static method in class org.apache.spark.InternalAccumulator
 
updatePrediction(Vector, double, DecisionTreeRegressionModel, double) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
Add prediction from a new boosting iteration to an existing prediction.
updatePredictionError(RDD<LabeledPoint>, RDD<Tuple2<Object, Object>>, double, DecisionTreeRegressionModel, Loss) - Static method in class org.apache.spark.ml.tree.impl.GradientBoostedTrees
Update a zipped predictionError RDD (as obtained with computeInitialPredictionAndError)
updatePredictionError(RDD<LabeledPoint>, RDD<Tuple2<Object, Object>>, double, DecisionTreeModel, Loss) - Static method in class org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
:: DeveloperApi :: Update a zipped predictionError RDD (as obtained with computeInitialPredictionAndError)
Updater - Class in org.apache.spark.mllib.optimization
:: DeveloperApi :: Class used to perform steps (weight update) using Gradient Descent methods.
Updater() - Constructor for class org.apache.spark.mllib.optimization.Updater
 
updateRddInfo(Seq<RDDInfo>, Seq<StorageStatus>) - Static method in class org.apache.spark.storage.StorageUtils
Update the given list of RDDInfo with the given list of storage statuses.
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of each key.
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>, int) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of each key.
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>, Partitioner) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of the key.
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>, Partitioner, JavaPairRDD<K, S>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of the key.
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>, Partitioner, JavaPairRDD<K, S>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>, int) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>, Partitioner) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
updateStateByKey(Function2<List<V>, Optional<S>, Optional<S>>, Partitioner, JavaPairRDD<K, S>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
updateStateByKey(Function2<Seq<V>, Option<S>, Option<S>>, ClassTag<S>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of each key.
updateStateByKey(Function2<Seq<V>, Option<S>, Option<S>>, int, ClassTag<S>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of each key.
updateStateByKey(Function2<Seq<V>, Option<S>, Option<S>>, Partitioner, ClassTag<S>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of the key.
updateStateByKey(Function1<Iterator<Tuple3<K, Seq<V>, Option<S>>>, Iterator<Tuple2<K, S>>>, Partitioner, boolean, ClassTag<S>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of each key.
updateStateByKey(Function2<Seq<V>, Option<S>, Option<S>>, Partitioner, RDD<Tuple2<K, S>>, ClassTag<S>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of the key.
updateStateByKey(Function1<Iterator<Tuple3<K, Seq<V>, Option<S>>>, Iterator<Tuple2<K, S>>>, Partitioner, boolean, RDD<Tuple2<K, S>>, ClassTag<S>) - Method in class org.apache.spark.streaming.dstream.PairDStreamFunctions
Return a new "state" DStream where the state for each key is updated by applying the given function on the previous state of the key and the new values of each key.
upper(Column) - Static method in class org.apache.spark.sql.functions
Converts a string column to upper case.
useDisk() - Method in class org.apache.spark.storage.StorageLevel
 
useDst - Variable in class org.apache.spark.graphx.TripletFields
Indicates whether the destination vertex attribute is included.
useEdge - Variable in class org.apache.spark.graphx.TripletFields
Indicates whether the edge attribute is included.
useMemory() - Method in class org.apache.spark.storage.StorageLevel
 
useNodeIdCache() - Method in class org.apache.spark.mllib.tree.configuration.Strategy
 
useOffHeap() - Method in class org.apache.spark.storage.StorageLevel
 
user() - Method in class org.apache.spark.ml.recommendation.ALS.Rating
 
user() - Method in class org.apache.spark.mllib.recommendation.Rating
 
USER_DEFAULT() - Static method in class org.apache.spark.sql.types.DecimalType
 
userClass() - Method in class org.apache.spark.mllib.linalg.VectorUDT
 
userCol() - Static method in class org.apache.spark.ml.recommendation.ALS
 
userCol() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
UserDefinedAggregateFunction - Class in org.apache.spark.sql.expressions
:: Experimental :: The base class for implementing user-defined aggregate functions (UDAF).
UserDefinedAggregateFunction() - Constructor for class org.apache.spark.sql.expressions.UserDefinedAggregateFunction
 
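 
A compact Scala sketch of a user-defined aggregate function built on UserDefinedAggregateFunction (a simple sum over a double column); the class and column names are illustrative:

    import org.apache.spark.sql.Row
    import org.apache.spark.sql.expressions.{MutableAggregationBuffer, UserDefinedAggregateFunction}
    import org.apache.spark.sql.types._

    class SimpleSum extends UserDefinedAggregateFunction {
      def inputSchema: StructType = new StructType().add("value", DoubleType)
      def bufferSchema: StructType = new StructType().add("sum", DoubleType)
      def dataType: DataType = DoubleType
      def deterministic: Boolean = true
      def initialize(buffer: MutableAggregationBuffer): Unit = { buffer(0) = 0.0 }
      def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
        if (!input.isNullAt(0)) buffer(0) = buffer.getDouble(0) + input.getDouble(0)
      }
      def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
        buffer1(0) = buffer1.getDouble(0) + buffer2.getDouble(0)
      }
      def evaluate(buffer: Row): Any = buffer.getDouble(0)
      // Usage: df.groupBy("group").agg(new SimpleSum()(col("value")))
    }
 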
UserDefinedFunction - Class in org.apache.spark.sql.expressions
A user-defined function.
UserDefinedFunction(Object, DataType, Option<Seq<DataType>>) - Constructor for class org.apache.spark.sql.expressions.UserDefinedFunction
 
userFactors() - Method in class org.apache.spark.ml.recommendation.ALSModel
 
userFeatures() - Method in class org.apache.spark.mllib.recommendation.MatrixFactorizationModel
 
useSrc - Variable in class org.apache.spark.graphx.TripletFields
Indicates whether the source vertex attribute is included.
Utils - Class in org.apache.spark.ml.impl
 
Utils() - Constructor for class org.apache.spark.ml.impl.Utils
 
Utils - Class in org.apache.spark.util
Various utility methods used by Spark.
Utils() - Constructor for class org.apache.spark.util.Utils
 
UUIDFromJson(JsonAST.JValue) - Static method in class org.apache.spark.util.JsonProtocol
 
UUIDToJson(UUID) - Static method in class org.apache.spark.util.JsonProtocol
 

V

V() - Method in class org.apache.spark.mllib.linalg.SingularValueDecomposition
 
validate() - Method in class org.apache.spark.mllib.linalg.distributed.BlockMatrix
Validates the block matrix info against the matrix data (blocks) and throws an exception if any error is found.
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.OneVsRest
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.KMeans
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.LDA
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.IDF
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.IDFModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.StringIndexer
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.Word2Vec
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
validateAndTransformSchema(StructType) - Static method in class org.apache.spark.ml.recommendation.ALS
 
validateAndTransformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
validateAndTransformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
validateAndTransformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
validateAndTransformSchema(StructType, boolean) - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.LinearRegression
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
validateAndTransformSchema(StructType, boolean, DataType) - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
validateData() - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
validateData() - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
validateData() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
 
validateData() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
validateData() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
validateData() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
validateData_$eq(boolean) - Static method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
validateData_$eq(boolean) - Static method in class org.apache.spark.mllib.classification.SVMWithSGD
 
validateData_$eq(boolean) - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
validateData_$eq(boolean) - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
validateData_$eq(boolean) - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
validateInputType(DataType) - Method in class org.apache.spark.ml.feature.DCT
 
validateInputType(DataType) - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
validateInputType(DataType) - Method in class org.apache.spark.ml.feature.NGram
 
validateInputType(DataType) - Static method in class org.apache.spark.ml.feature.Normalizer
 
validateInputType(DataType) - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
validateInputType(DataType) - Method in class org.apache.spark.ml.feature.RegexTokenizer
 
validateInputType(DataType) - Method in class org.apache.spark.ml.feature.Tokenizer
 
validateInputType(DataType) - Method in class org.apache.spark.ml.UnaryTransformer
Validates the input type.
validateParams() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
validateParams() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
validateParams() - Static method in class org.apache.spark.ml.classification.GBTClassificationModel
 
validateParams() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
validateParams() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
validateParams() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
validateParams() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
validateParams() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
validateParams() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
validateParams() - Static method in class org.apache.spark.ml.classification.NaiveBayesModel
 
validateParams() - Static method in class org.apache.spark.ml.classification.OneVsRest
 
validateParams() - Static method in class org.apache.spark.ml.classification.OneVsRestModel
 
validateParams() - Static method in class org.apache.spark.ml.classification.ProbabilisticClassificationModel
 
validateParams() - Static method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
validateParams() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
validateParams() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
validateParams() - Static method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
validateParams() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
validateParams() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
validateParams() - Static method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
validateParams() - Static method in class org.apache.spark.ml.clustering.KMeans
 
validateParams() - Static method in class org.apache.spark.ml.clustering.KMeansModel
 
validateParams() - Static method in class org.apache.spark.ml.clustering.LDA
 
validateParams() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
validateParams() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
validateParams() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
validateParams() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
validateParams() - Static method in class org.apache.spark.ml.feature.Binarizer
 
validateParams() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
validateParams() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
validateParams() - Static method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.ColumnPruner
 
validateParams() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
validateParams() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.DCT
 
validateParams() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
validateParams() - Static method in class org.apache.spark.ml.feature.HashingTF
 
validateParams() - Static method in class org.apache.spark.ml.feature.IDF
 
validateParams() - Static method in class org.apache.spark.ml.feature.IDFModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.IndexToString
 
validateParams() - Static method in class org.apache.spark.ml.feature.Interaction
 
validateParams() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
validateParams() - Static method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
validateParams() - Static method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.NGram
 
validateParams() - Static method in class org.apache.spark.ml.feature.Normalizer
 
validateParams() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
validateParams() - Static method in class org.apache.spark.ml.feature.PCA
 
validateParams() - Static method in class org.apache.spark.ml.feature.PCAModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
validateParams() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
validateParams() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
validateParams() - Static method in class org.apache.spark.ml.feature.RFormula
 
validateParams() - Static method in class org.apache.spark.ml.feature.RFormulaModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
validateParams() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
validateParams() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
validateParams() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
validateParams() - Static method in class org.apache.spark.ml.feature.StringIndexerModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
validateParams() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
validateParams() - Static method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
validateParams() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
validateParams() - Static method in class org.apache.spark.ml.feature.VectorIndexerModel
 
validateParams() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
validateParams() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
validateParams() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
validateParams() - Method in interface org.apache.spark.ml.param.Params
Deprecated.
Will be removed in 2.1.0. All the checks should be merged into transformSchema
validateParams() - Static method in class org.apache.spark.ml.Pipeline
 
validateParams() - Static method in class org.apache.spark.ml.PipelineModel
 
validateParams() - Static method in class org.apache.spark.ml.recommendation.ALS
 
validateParams() - Static method in class org.apache.spark.ml.recommendation.ALSModel
 
validateParams() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
validateParams() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
validateParams() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
validateParams() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
validateParams() - Static method in class org.apache.spark.ml.regression.GBTRegressionModel
 
validateParams() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
validateParams() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
validateParams() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
validateParams() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
validateParams() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
validateParams() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
validateParams() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
validateParams() - Static method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
validateParams() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
validateParams() - Static method in class org.apache.spark.ml.tuning.CrossValidator
 
validateParams() - Static method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
validateParams() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
validateParams() - Static method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
validateStages(PipelineStage[]) - Method in class org.apache.spark.ml.Pipeline.SharedReadWrite$
Check that all stages are Writable
validationMetrics() - Method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
validationTol() - Method in class org.apache.spark.mllib.tree.configuration.BoostingStrategy
 
validators() - Method in class org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
 
validators() - Method in class org.apache.spark.mllib.classification.LogisticRegressionWithSGD
Deprecated.
 
validators() - Method in class org.apache.spark.mllib.classification.SVMWithSGD
 
validators() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm
 
validators() - Static method in class org.apache.spark.mllib.regression.LassoWithSGD
Deprecated.
 
validators() - Static method in class org.apache.spark.mllib.regression.LinearRegressionWithSGD
Deprecated.
 
validators() - Static method in class org.apache.spark.mllib.regression.RidgeRegressionWithSGD
Deprecated.
 
validConstraints() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
value() - Method in class org.apache.spark.Accumulable
Deprecated.
Access the accumulator's current value; only allowed on the driver.
value() - Static method in class org.apache.spark.Accumulator
Deprecated.
 
value() - Method in class org.apache.spark.broadcast.Broadcast
Get the broadcasted value.
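A minimal sketch of Broadcast.value(), assuming an existing SparkContext sc and an RDD[String] named words (both are assumptions, not part of this entry):

    // Broadcast a small lookup table once, then read it on the executors via value().
    val lookup = sc.broadcast(Map("spark" -> 1, "hadoop" -> 2))
    val ids = words.map(w => lookup.value.getOrElse(w, -1))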
value() - Method in class org.apache.spark.ComplexFutureAction
 
value() - Method in interface org.apache.spark.FutureAction
The value of this Future.
value() - Method in class org.apache.spark.ml.param.ParamPair
 
value() - Method in class org.apache.spark.mllib.linalg.distributed.MatrixEntry
 
value() - Method in class org.apache.spark.mllib.stat.test.BinarySample
 
Value() - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
Value(int) - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
Value(String) - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
Value(int, String) - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
Value() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
Value(int) - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
Value(String) - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
Value(int, String) - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
Value() - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
Value(int) - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
Value(String) - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
Value(int, String) - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
Value() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
Value(int) - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
Value(String) - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
Value(int, String) - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
Value() - Static method in class org.apache.spark.rdd.CheckpointState
 
Value(int) - Static method in class org.apache.spark.rdd.CheckpointState
 
Value(String) - Static method in class org.apache.spark.rdd.CheckpointState
 
Value(int, String) - Static method in class org.apache.spark.rdd.CheckpointState
 
value() - Method in class org.apache.spark.scheduler.AccumulableInfo
 
Value() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
Value(int) - Static method in class org.apache.spark.scheduler.SchedulingMode
 
Value(String) - Static method in class org.apache.spark.scheduler.SchedulingMode
 
Value(int, String) - Static method in class org.apache.spark.scheduler.SchedulingMode
 
Value() - Static method in class org.apache.spark.scheduler.TaskLocality
 
Value(int) - Static method in class org.apache.spark.scheduler.TaskLocality
 
Value(String) - Static method in class org.apache.spark.scheduler.TaskLocality
 
Value(int, String) - Static method in class org.apache.spark.scheduler.TaskLocality
 
value() - Method in class org.apache.spark.SerializableWritable
 
value() - Method in class org.apache.spark.SimpleFutureAction
 
value() - Method in class org.apache.spark.sql.sources.EqualNullSafe
 
value() - Method in class org.apache.spark.sql.sources.EqualTo
 
value() - Method in class org.apache.spark.sql.sources.GreaterThan
 
value() - Method in class org.apache.spark.sql.sources.GreaterThanOrEqual
 
value() - Method in class org.apache.spark.sql.sources.LessThan
 
value() - Method in class org.apache.spark.sql.sources.LessThanOrEqual
 
value() - Method in class org.apache.spark.sql.sources.StringContains
 
value() - Method in class org.apache.spark.sql.sources.StringEndsWith
 
value() - Method in class org.apache.spark.sql.sources.StringStartsWith
 
value() - Method in class org.apache.spark.status.api.v1.AccumulableInfo
 
value() - Method in class org.apache.spark.storage.memory.DeserializedMemoryEntry
 
Value() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
Value(int) - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
Value(String) - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
Value(int, String) - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
Value() - Static method in class org.apache.spark.TaskState
 
Value(int) - Static method in class org.apache.spark.TaskState
 
Value(String) - Static method in class org.apache.spark.TaskState
 
Value(int, String) - Static method in class org.apache.spark.TaskState
 
value() - Method in class org.apache.spark.util.AccumulatorV2
Defines the current value of this accumulator
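A minimal sketch of this accessor using a LongAccumulator; sc (a SparkContext) and lines (an RDD[String]) are assumed:

    // Register a named accumulator, update it on the executors, read it back on the driver.
    val errors = sc.longAccumulator("errors")
    lines.foreach(line => if (line.contains("ERROR")) errors.add(1))
    println(errors.value)   // value() is only meaningful on the driver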
value() - Method in class org.apache.spark.util.DoubleAccumulator
 
value() - Method in class org.apache.spark.util.LegacyAccumulatorWrapper
 
value() - Method in class org.apache.spark.util.ListAccumulator
 
value() - Method in class org.apache.spark.util.LongAccumulator
 
value_$eq(R) - Static method in class org.apache.spark.Accumulator
Deprecated.
 
valueContainsNull() - Method in class org.apache.spark.sql.types.MapType
 
valueOf(String) - Static method in enum org.apache.spark.graphx.impl.EdgeActiveness
Returns the enum constant of this type with the specified name.
valueOf(String) - Static method in enum org.apache.spark.JobExecutionStatus
Returns the enum constant of this type with the specified name.
valueOf(String) - Static method in enum org.apache.spark.launcher.SparkAppHandle.State
Returns the enum constant of this type with the specified name.
valueOf(String) - Static method in enum org.apache.spark.sql.SaveMode
Returns the enum constant of this type with the specified name.
valueOf(String) - Static method in enum org.apache.spark.status.api.v1.ApplicationStatus
Returns the enum constant of this type with the specified name.
valueOf(String) - Static method in enum org.apache.spark.status.api.v1.StageStatus
Returns the enum constant of this type with the specified name.
valueOf(String) - Static method in enum org.apache.spark.status.api.v1.TaskSorting
Returns the enum constant of this type with the specified name.
valueOf(String) - Static method in enum org.apache.spark.streaming.StreamingContextState
Returns the enum constant of this type with the specified name.
valueOf(String) - Static method in enum org.apache.spark.util.sketch.BloomFilter.Version
Returns the enum constant of this type with the specified name.
valueOf(String) - Static method in enum org.apache.spark.util.sketch.CountMinSketch.Version
Returns the enum constant of this type with the specified name.
values() - Method in class org.apache.spark.api.java.JavaPairRDD
Return an RDD with the values of each tuple.
values() - Static method in enum org.apache.spark.graphx.impl.EdgeActiveness
Returns an array containing the constants of this enum type, in the order they are declared.
values() - Static method in enum org.apache.spark.JobExecutionStatus
Returns an array containing the constants of this enum type, in the order they are declared.
values() - Static method in enum org.apache.spark.launcher.SparkAppHandle.State
Returns an array containing the constants of this enum type, in the order they are declared.
VALUES() - Static method in class org.apache.spark.ml.attribute.AttributeKeys
 
values() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
values() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
values() - Method in class org.apache.spark.ml.linalg.DenseMatrix
 
values() - Method in class org.apache.spark.ml.linalg.DenseVector
 
values() - Method in class org.apache.spark.ml.linalg.SparseMatrix
 
values() - Method in class org.apache.spark.ml.linalg.SparseVector
 
values() - Method in class org.apache.spark.mllib.linalg.DenseMatrix
 
values() - Method in class org.apache.spark.mllib.linalg.DenseVector
 
values() - Method in class org.apache.spark.mllib.linalg.SparseMatrix
 
values() - Method in class org.apache.spark.mllib.linalg.SparseVector
 
values() - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
values() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
values() - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
values() - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
values() - Static method in class org.apache.spark.rdd.CheckpointState
 
values() - Method in class org.apache.spark.rdd.PairRDDFunctions
Return an RDD with the values of each tuple.
values() - Static method in class org.apache.spark.scheduler.SchedulingMode
 
values() - Static method in class org.apache.spark.scheduler.TaskLocality
 
values() - Static method in enum org.apache.spark.sql.SaveMode
Returns an array containing the constants of this enum type, in the order they are declared.
values() - Method in class org.apache.spark.sql.sources.In
 
values() - Static method in enum org.apache.spark.status.api.v1.ApplicationStatus
Returns an array containing the constants of this enum type, in the order they are declared.
values() - Static method in enum org.apache.spark.status.api.v1.StageStatus
Returns an array containing the constants of this enum type, in the order they are declared.
values() - Static method in enum org.apache.spark.status.api.v1.TaskSorting
Returns an array containing the constants of this enum type, in the order they are declared.
values() - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
values() - Static method in enum org.apache.spark.streaming.StreamingContextState
Returns an array containing the constants of this enum type, in the order they are declared.
values() - Static method in class org.apache.spark.TaskState
 
values() - Static method in enum org.apache.spark.util.sketch.BloomFilter.Version
Returns an array containing the constants of this enum type, in the order they are declared.
values() - Static method in enum org.apache.spark.util.sketch.CountMinSketch.Version
Returns an array containing the constants of this enum type, in the order they are declared.
valueType() - Method in class org.apache.spark.sql.types.MapType
 
var_pop(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the population variance of the values in a group.
var_pop(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the population variance of the values in a group.
var_samp(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the unbiased variance of the values in a group.
var_samp(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: returns the unbiased variance of the values in a group.
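A hedged sketch of the var_pop / var_samp aggregates above; the DataFrame df and its numeric "price" column are assumptions:

    import org.apache.spark.sql.functions.{var_pop, var_samp}
    // Population variance divides by n, sample (unbiased) variance by n - 1.
    df.agg(var_pop("price"), var_samp("price")).show()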
VariableSubstitution - Class in org.apache.spark.sql.internal
A helper class that enables substitution using syntax like ${var}, ${system:var} and ${env:var}.
VariableSubstitution(SQLConf) - Constructor for class org.apache.spark.sql.internal.VariableSubstitution
 
variance() - Method in class org.apache.spark.api.java.JavaDoubleRDD
Compute the variance of this RDD's elements.
variance(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Binomial$
 
variance(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gamma$
 
variance(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Gaussian$
 
variance(double) - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegression.Poisson$
 
variance() - Method in class org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
Unbiased estimate of sample variance of each dimension.
variance() - Method in interface org.apache.spark.mllib.stat.MultivariateStatisticalSummary
Sample variance vector.
Variance - Class in org.apache.spark.mllib.tree.impurity
:: Experimental :: Class for calculating variance during regression
Variance() - Constructor for class org.apache.spark.mllib.tree.impurity.Variance
 
variance() - Method in class org.apache.spark.rdd.DoubleRDDFunctions
Compute the variance of this RDD's elements.
variance(Column) - Static method in class org.apache.spark.sql.functions
Aggregate function: alias for var_samp.
variance(String) - Static method in class org.apache.spark.sql.functions
Aggregate function: alias for var_samp.
variance() - Method in class org.apache.spark.util.StatCounter
Return the variance of the values.
varianceCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
varianceCol() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
vClassTag() - Method in class org.apache.spark.api.java.JavaHadoopRDD
 
vClassTag() - Method in class org.apache.spark.api.java.JavaNewHadoopRDD
 
vClassTag() - Method in class org.apache.spark.api.java.JavaPairRDD
 
vClassTag() - Method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
vClassTag() - Method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
vdTag() - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
vdTag() - Method in class org.apache.spark.graphx.VertexRDD
 
Vector - Interface in org.apache.spark.ml.linalg
Represents a numeric vector, whose index type is Int and value type is Double.
vector() - Method in class org.apache.spark.mllib.linalg.distributed.IndexedRow
 
Vector - Interface in org.apache.spark.mllib.linalg
Represents a numeric vector, whose index type is Int and value type is Double.
VectorAssembler - Class in org.apache.spark.ml.feature
:: Experimental :: A feature transformer that merges multiple columns into a vector column.
VectorAssembler(String) - Constructor for class org.apache.spark.ml.feature.VectorAssembler
 
VectorAssembler() - Constructor for class org.apache.spark.ml.feature.VectorAssembler
 
VectorAttributeRewriter - Class in org.apache.spark.ml.feature
Utility transformer that rewrites Vector attribute names via prefix replacement.
VectorAttributeRewriter(String, String, Map<String, String>) - Constructor for class org.apache.spark.ml.feature.VectorAttributeRewriter
 
VectorAttributeRewriter(String, Map<String, String>) - Constructor for class org.apache.spark.ml.feature.VectorAttributeRewriter
 
vectorCol() - Method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
VectorImplicits - Class in org.apache.spark.mllib.linalg
Implicit methods available in Scala for converting org.apache.spark.mllib.linalg.Vector to org.apache.spark.ml.linalg.Vector and vice versa.
VectorImplicits() - Constructor for class org.apache.spark.mllib.linalg.VectorImplicits
 
VectorIndexer - Class in org.apache.spark.ml.feature
:: Experimental :: Class for indexing categorical feature columns in a dataset of Vector.
VectorIndexer(String) - Constructor for class org.apache.spark.ml.feature.VectorIndexer
 
VectorIndexer() - Constructor for class org.apache.spark.ml.feature.VectorIndexer
 
VectorIndexerModel - Class in org.apache.spark.ml.feature
:: Experimental :: Transform categorical features to use 0-based indices instead of their original values.
Vectors - Class in org.apache.spark.ml.linalg
Factory methods for Vector.
Vectors() - Constructor for class org.apache.spark.ml.linalg.Vectors
 
Vectors - Class in org.apache.spark.mllib.linalg
Factory methods for Vector.
Vectors() - Constructor for class org.apache.spark.mllib.linalg.Vectors
 
vectorSize() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
vectorSize() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
VectorSlicer - Class in org.apache.spark.ml.feature
:: Experimental :: This class takes a feature vector and outputs a new feature vector with a subarray of the original features.
VectorSlicer(String) - Constructor for class org.apache.spark.ml.feature.VectorSlicer
 
VectorSlicer() - Constructor for class org.apache.spark.ml.feature.VectorSlicer
 
VectorTransformer - Interface in org.apache.spark.mllib.feature
:: DeveloperApi :: Trait for transformation of a vector
VectorUDT - Class in org.apache.spark.mllib.linalg
:: AlphaComponent ::
VectorUDT() - Constructor for class org.apache.spark.mllib.linalg.VectorUDT
 
version() - Method in class org.apache.spark.api.java.JavaSparkContext
The version of Spark on which this application is running.
version() - Method in class org.apache.spark.io.SnappyCompressionCodec
 
version() - Method in class org.apache.spark.SparkContext
The version of Spark on which this application is running.
VersionInfo - Class in org.apache.spark.status.api.v1
 
vertcat(Matrix[]) - Static method in class org.apache.spark.ml.linalg.Matrices
Vertically concatenate a sequence of matrices.
vertcat(Matrix[]) - Static method in class org.apache.spark.mllib.linalg.Matrices
Vertically concatenate a sequence of matrices.
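A small self-contained sketch of Matrices.vertcat from org.apache.spark.ml.linalg; the inputs must have the same number of columns:

    import org.apache.spark.ml.linalg.{Matrices, Matrix}
    val top: Matrix    = Matrices.dense(1, 2, Array(1.0, 2.0))   // 1 x 2
    val bottom: Matrix = Matrices.dense(1, 2, Array(3.0, 4.0))   // 1 x 2
    val stacked = Matrices.vertcat(Array(top, bottom))           // 2 x 2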
vertexAttr(long) - Method in class org.apache.spark.graphx.EdgeTriplet
Get the vertex object for the given vertex in the edge.
VertexRDD<VD> - Class in org.apache.spark.graphx
Extends RDD[(VertexId, VD)] by ensuring that there is only one entry for each vertex and by pre-indexing the entries for fast, efficient joins.
VertexRDD(SparkContext, Seq<Dependency<?>>) - Constructor for class org.apache.spark.graphx.VertexRDD
 
VertexRDDImpl<VD> - Class in org.apache.spark.graphx.impl
 
vertices() - Method in class org.apache.spark.graphx.Graph
An RDD containing the vertices and their associated attributes.
vertices() - Method in class org.apache.spark.graphx.impl.GraphImpl
 
view() - Static method in class org.apache.spark.sql.types.StructType
 
view(int, int) - Static method in class org.apache.spark.sql.types.StructType
 
visit(int, int, String, String, String, String[]) - Method in class org.apache.spark.util.InnerClosureFinder
 
visitMethod(int, String, String, String, String[]) - Method in class org.apache.spark.util.InnerClosureFinder
 
visitMethod(int, String, String, String, String[]) - Method in class org.apache.spark.util.ReturnStatementFinder
 
vizHeaderNodes() - Static method in class org.apache.spark.ui.UIUtils
 
vManifest() - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
vManifest() - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
vManifest() - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
vocabSize() - Static method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
vocabSize() - Method in class org.apache.spark.ml.clustering.LDAModel
 
vocabSize() - Static method in class org.apache.spark.ml.clustering.LocalLDAModel
 
vocabSize() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
vocabSize() - Static method in class org.apache.spark.ml.feature.CountVectorizerModel
 
vocabSize() - Method in class org.apache.spark.mllib.clustering.DistributedLDAModel
 
vocabSize() - Method in class org.apache.spark.mllib.clustering.LDAModel
Vocabulary size (the number of terms in the vocabulary).
vocabSize() - Method in class org.apache.spark.mllib.clustering.LocalLDAModel
 
vocabulary() - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
VocabWord - Class in org.apache.spark.mllib.feature
Entry in vocabulary
VocabWord(String, int, int[], int[], int) - Constructor for class org.apache.spark.mllib.feature.VocabWord
 
VoidFunction<T> - Interface in org.apache.spark.api.java.function
A function with no return value.
VoidFunction2<T1,T2> - Interface in org.apache.spark.api.java.function
A two-argument function that takes arguments of type T1 and T2 with no return value.
Vote() - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 

W

w(boolean) - Method in class org.apache.spark.ml.param.BooleanParam
Creates a param pair with the given value (for Java).
w(List<Double>) - Method in class org.apache.spark.ml.param.DoubleArrayParam
Creates a param pair with a List of values (for Java and Python).
w(double) - Method in class org.apache.spark.ml.param.DoubleParam
Creates a param pair with the given value (for Java).
w(float) - Method in class org.apache.spark.ml.param.FloatParam
Creates a param pair with the given value (for Java).
w(List<Integer>) - Method in class org.apache.spark.ml.param.IntArrayParam
Creates a param pair with a List of values (for Java and Python).
w(int) - Method in class org.apache.spark.ml.param.IntParam
Creates a param pair with the given value (for Java).
w(long) - Method in class org.apache.spark.ml.param.LongParam
Creates a param pair with the given value (for Java).
w(T) - Method in class org.apache.spark.ml.param.Param
Creates a param pair with the given value (for Java).
w(List<String>) - Method in class org.apache.spark.ml.param.StringArrayParam
Creates a param pair with a List of values (for Java and Python).
waitForProcess(Process, long) - Static method in class org.apache.spark.util.Utils
Wait for a process to terminate for at most the specified duration.
waitForSubqueries() - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
warmUp(SparkContext) - Static method in class org.apache.spark.streaming.util.RawTextHelper
Warms up the SparkContext in master and slave by running tasks to force the JIT to kick in before the real workload starts.
weekofyear(Column) - Static method in class org.apache.spark.sql.functions
Extracts the week number as an integer from a given date/timestamp/string.
WeibullGenerator - Class in org.apache.spark.mllib.random
:: DeveloperApi :: Generates i.i.d. samples from the Weibull distribution with the given shape and scale.
WeibullGenerator(double, double) - Constructor for class org.apache.spark.mllib.random.WeibullGenerator
 
weightCol() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
weightCol() - Static method in class org.apache.spark.ml.classification.LogisticRegressionModel
 
weightCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
weightCol() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
weightCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
weightCol() - Static method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
weightCol() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
weightCol() - Static method in class org.apache.spark.ml.regression.LinearRegressionModel
 
weightedFalsePositiveRate() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns weighted false positive rate
weightedFMeasure(double) - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns weighted averaged f-measure
weightedFMeasure() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns weighted averaged f1-measure
weightedPrecision() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns weighted averaged precision
weightedRecall() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns weighted averaged recall (equal to precision, recall and f-measure)
weightedTruePositiveRate() - Method in class org.apache.spark.mllib.evaluation.MulticlassMetrics
Returns weighted true positive rate (equal to precision, recall and f-measure)
weights() - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
weights() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
weights() - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
weights() - Method in class org.apache.spark.mllib.classification.impl.GLMClassificationModel.SaveLoadV1_0$.Data
 
weights() - Method in class org.apache.spark.mllib.classification.LogisticRegressionModel
 
weights() - Method in class org.apache.spark.mllib.classification.SVMModel
 
weights() - Method in class org.apache.spark.mllib.clustering.ExpectationSum
 
weights() - Method in class org.apache.spark.mllib.clustering.GaussianMixtureModel
 
weights() - Method in class org.apache.spark.mllib.regression.GeneralizedLinearModel
 
weights() - Method in class org.apache.spark.mllib.regression.impl.GLMRegressionModel.SaveLoadV1_0$.Data
 
weights() - Method in class org.apache.spark.mllib.regression.LassoModel
 
weights() - Method in class org.apache.spark.mllib.regression.LinearRegressionModel
 
weights() - Method in class org.apache.spark.mllib.regression.RidgeRegressionModel
 
WelchTTest - Class in org.apache.spark.mllib.stat.test
Performs Welch's 2-sample t-test.
WelchTTest() - Constructor for class org.apache.spark.mllib.stat.test.WelchTTest
 
when(Column, Object) - Method in class org.apache.spark.sql.Column
Evaluates a list of conditions and returns one of multiple possible result expressions.
when(Column, Object) - Static method in class org.apache.spark.sql.functions
Evaluates a list of conditions and returns one of multiple possible result expressions.
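A minimal sketch of when(...).otherwise(...); the DataFrame people and its "age" column are assumptions:

    import org.apache.spark.sql.functions.{col, when}
    // Rows that match no when() clause fall through to otherwise().
    val labelled = people.withColumn("group",
      when(col("age") >= 18, "adult").otherwise("minor"))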
where(Column) - Method in class org.apache.spark.sql.Dataset
Filters rows using the given condition.
where(String) - Method in class org.apache.spark.sql.Dataset
Filters rows using the given SQL expression.
whiteSpace() - Static method in class org.apache.spark.ml.feature.RFormulaParser
 
wholeTextFiles(String, int) - Method in class org.apache.spark.api.java.JavaSparkContext
Read a directory of text files from HDFS, a local file system (available on all nodes), or any Hadoop-supported file system URI.
wholeTextFiles(String) - Method in class org.apache.spark.api.java.JavaSparkContext
Read a directory of text files from HDFS, a local file system (available on all nodes), or any Hadoop-supported file system URI.
wholeTextFiles(String, int) - Method in class org.apache.spark.SparkContext
Read a directory of text files from HDFS, a local file system (available on all nodes), or any Hadoop-supported file system URI.
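A minimal sketch of wholeTextFiles, assuming an existing SparkContext sc; the path is a placeholder:

    // Each element is (filePath, fileContent); best suited to directories of many small files.
    val files = sc.wholeTextFiles("hdfs://namenode/some/dir")
    val nonEmpty = files.filter { case (_, content) => content.trim.nonEmpty }.count()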
width() - Method in class org.apache.spark.util.sketch.CountMinSketch
Width of this CountMinSketch.
Window - Class in org.apache.spark.sql.expressions
:: Experimental :: Utility functions for defining window in DataFrames.
window(Column, String, String, String) - Static method in class org.apache.spark.sql.functions
Bucketize rows into one or more time windows given a timestamp specifying column.
window(Column, String, String) - Static method in class org.apache.spark.sql.functions
Bucketize rows into one or more time windows given a timestamp specifying column.
window(Column, String) - Static method in class org.apache.spark.sql.functions
Generates tumbling time windows given a timestamp specifying column.
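A hedged sketch of the tumbling-window variant above; events is assumed to be a DataFrame with a timestamp column named "ts":

    import org.apache.spark.sql.functions.{col, window}
    // Bucket rows into non-overlapping 10-minute windows and count events per bucket.
    val perWindow = events.groupBy(window(col("ts"), "10 minutes")).count()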
window(Duration) - Method in class org.apache.spark.streaming.api.java.JavaDStream
Return a new DStream in which each RDD contains all the elements seen in a sliding window of time over this DStream.
window(Duration, Duration) - Method in class org.apache.spark.streaming.api.java.JavaDStream
Return a new DStream in which each RDD contains all the elements seen in a sliding window of time over this DStream.
window(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
window(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
window(Duration) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream which is computed based on windowed batches of this DStream.
window(Duration, Duration) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
Return a new DStream which is computed based on windowed batches of this DStream.
window(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
window(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
window(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
window(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
window(Duration) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
window(Duration, Duration) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
window(Duration) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD contains all the elements seen in a sliding window of time over this DStream.
window(Duration, Duration) - Method in class org.apache.spark.streaming.dstream.DStream
Return a new DStream in which each RDD contains all the elements seen in a sliding window of time over this DStream.
windowsDrive() - Static method in class org.apache.spark.util.Utils
Pattern for matching a Windows drive, which contains only a single alphabetic character.
windowSize() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
windowSize() - Static method in class org.apache.spark.ml.feature.Word2VecModel
 
WindowSpec - Class in org.apache.spark.sql.expressions
:: Experimental :: A window specification that defines the partitioning, ordering, and frame boundaries.
wipe() - Method in class org.apache.spark.mllib.optimization.NNLS.Workspace
 
withColumn(String, Column) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset by adding a column or replacing the existing column that has the same name.
withColumnRenamed(String, String) - Method in class org.apache.spark.sql.Dataset
Returns a new Dataset with a column renamed.
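A minimal sketch of withColumn and withColumnRenamed; the DataFrame df and its "price" column are assumptions:

    import org.apache.spark.sql.functions.col
    val adjusted = df
      .withColumn("price_with_tax", col("price") * 1.2)   // add or replace a column
      .withColumnRenamed("price", "base_price")           // rename an existing column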
withDummyCallSite(SparkContext, Function0<T>) - Static method in class org.apache.spark.util.Utils
To avoid calling Utils.getCallSite for every single RDD we create in the body, set a dummy call site that RDDs use instead.
withEdges(EdgeRDD<?>) - Method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
withEdges(EdgeRDD<?>) - Method in class org.apache.spark.graphx.VertexRDD
Prepares this VertexRDD for efficient joins with the given EdgeRDD.
withFilter(Function1<A, Object>) - Static method in class org.apache.spark.sql.types.StructType
 
withHiveExternalCatalog(SparkContext) - Static method in class org.apache.spark.sql.hive.HiveUtils
 
withIndex(int) - Method in class org.apache.spark.ml.attribute.Attribute
Copy with a new index.
withIndex(int) - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
withIndex(int) - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
withIndex(int) - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
withIndex(int) - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
withMax(double) - Method in class org.apache.spark.ml.attribute.NumericAttribute
Copy with a new max value.
withMean() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
withMean() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
withMean() - Method in class org.apache.spark.mllib.feature.StandardScalerModel
 
withMetadata(Metadata) - Method in class org.apache.spark.sql.types.MetadataBuilder
Include the content of an existing Metadata instance.
withMin(double) - Method in class org.apache.spark.ml.attribute.NumericAttribute
Copy with a new min value.
withName(String) - Method in class org.apache.spark.ml.attribute.Attribute
Copy with a new name.
withName(String) - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
withName(String) - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
withName(String) - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
withName(String) - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
withName(String) - Static method in class org.apache.spark.mllib.tree.configuration.Algo
 
withName(String) - Static method in class org.apache.spark.mllib.tree.configuration.EnsembleCombiningStrategy
 
withName(String) - Static method in class org.apache.spark.mllib.tree.configuration.FeatureType
 
withName(String) - Static method in class org.apache.spark.mllib.tree.configuration.QuantileStrategy
 
withName(String) - Static method in class org.apache.spark.rdd.CheckpointState
 
withName(String) - Static method in class org.apache.spark.scheduler.SchedulingMode
 
withName(String) - Static method in class org.apache.spark.scheduler.TaskLocality
 
withName(String) - Static method in class org.apache.spark.streaming.scheduler.ReceiverState
 
withName(String) - Static method in class org.apache.spark.TaskState
 
withNewChildren(Seq<BaseType>) - Static method in class org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
withNumValues(int) - Method in class org.apache.spark.ml.attribute.NominalAttribute
Copy with a new `numValues` and empty `values`.
withoutIndex() - Method in class org.apache.spark.ml.attribute.Attribute
Copy without the index.
withoutIndex() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
withoutIndex() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
withoutIndex() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
withoutIndex() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
withoutMax() - Method in class org.apache.spark.ml.attribute.NumericAttribute
Copy without the max value.
withoutMin() - Method in class org.apache.spark.ml.attribute.NumericAttribute
Copy without the min value.
withoutName() - Method in class org.apache.spark.ml.attribute.Attribute
Copy without the name.
withoutName() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
 
withoutName() - Method in class org.apache.spark.ml.attribute.NominalAttribute
 
withoutName() - Method in class org.apache.spark.ml.attribute.NumericAttribute
 
withoutName() - Static method in class org.apache.spark.ml.attribute.UnresolvedAttribute
 
withoutNumValues() - Method in class org.apache.spark.ml.attribute.NominalAttribute
Copy without the `numValues`.
withoutSparsity() - Method in class org.apache.spark.ml.attribute.NumericAttribute
Copy without the sparsity.
withoutStd() - Method in class org.apache.spark.ml.attribute.NumericAttribute
Copy without the standard deviation.
withoutSummary() - Method in class org.apache.spark.ml.attribute.NumericAttribute
Copy without summary statistics.
withoutValues() - Method in class org.apache.spark.ml.attribute.BinaryAttribute
Copy without the values.
withoutValues() - Method in class org.apache.spark.ml.attribute.NominalAttribute
Copy without the values.
withPosition(Option<Object>, Option<Object>) - Method in exception org.apache.spark.sql.AnalysisException
 
withSparsity(double) - Method in class org.apache.spark.ml.attribute.NumericAttribute
Copy with a new sparsity.
withStd(double) - Method in class org.apache.spark.ml.attribute.NumericAttribute
Copy with a new standard deviation.
withStd() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
withStd() - Static method in class org.apache.spark.ml.feature.StandardScalerModel
 
withStd() - Method in class org.apache.spark.mllib.feature.StandardScalerModel
 
withValues(String, String) - Method in class org.apache.spark.ml.attribute.BinaryAttribute
Copy with new values.
withValues(String, String...) - Method in class org.apache.spark.ml.attribute.NominalAttribute
Copy with new values and empty `numValues`.
withValues(String[]) - Method in class org.apache.spark.ml.attribute.NominalAttribute
Copy with new values and empty `numValues`.
withValues(String, Seq<String>) - Method in class org.apache.spark.ml.attribute.NominalAttribute
Copy with new values and empty `numValues`.
word() - Method in class org.apache.spark.mllib.feature.VocabWord
 
Word2Vec - Class in org.apache.spark.ml.feature
:: Experimental :: Word2Vec trains a model of Map(String, Vector), i.e. it transforms each word into a vector for further natural language processing or machine learning.
Word2Vec(String) - Constructor for class org.apache.spark.ml.feature.Word2Vec
 
Word2Vec() - Constructor for class org.apache.spark.ml.feature.Word2Vec
 
Word2Vec - Class in org.apache.spark.mllib.feature
Word2Vec creates vector representation of words in a text corpus.
Word2Vec() - Constructor for class org.apache.spark.mllib.feature.Word2Vec
 
Word2VecModel - Class in org.apache.spark.ml.feature
:: Experimental :: Model fitted by Word2Vec.
Word2VecModel - Class in org.apache.spark.mllib.feature
Word2Vec model. wordIndex maps each word to an index, which can be used to retrieve the corresponding vector from wordVectors; wordVectors is an array of length numWords * vectorSize, and the vector for the word mapped to index i can be retrieved by the slice (i * vectorSize, i * vectorSize + vectorSize).
Word2VecModel(Map<String, float[]>) - Constructor for class org.apache.spark.mllib.feature.Word2VecModel
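A plain-Scala illustration of the wordVectors layout described above; all values here are made up:

    val vectorSize  = 3
    val wordIndex   = Map("spark" -> 2)                    // word -> index
    val wordVectors = Array.fill(4 * vectorSize)(0.1f)     // numWords = 4, flattened
    val i = wordIndex("spark")
    // The vector for a word with index i occupies positions [i * vectorSize, i * vectorSize + vectorSize).
    val sparkVector = wordVectors.slice(i * vectorSize, i * vectorSize + vectorSize)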
 
wrap(Object, ObjectInspector, DataType) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
wrap(InternalRow, Seq<ObjectInspector>, Object[], DataType[]) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
wrap(Seq<Object>, Seq<ObjectInspector>, Object[], DataType[]) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
wrapped() - Method in class org.apache.spark.sql.SparkSession
 
wrapperClass() - Static method in class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
wrapperFor(ObjectInspector, DataType) - Static method in class org.apache.spark.sql.hive.orc.OrcRelation
 
wrapperToFileSinkDesc(HiveShim.ShimFileSinkDesc) - Static method in class org.apache.spark.sql.hive.HiveShim
 
wrapRDD(RDD<Double>) - Method in class org.apache.spark.api.java.JavaDoubleRDD
 
wrapRDD(RDD<Tuple2<K, V>>) - Method in class org.apache.spark.api.java.JavaPairRDD
 
wrapRDD(RDD<T>) - Method in class org.apache.spark.api.java.JavaRDD
 
wrapRDD(RDD<T>) - Method in interface org.apache.spark.api.java.JavaRDDLike
 
wrapRDD(RDD<T>) - Method in class org.apache.spark.streaming.api.java.JavaDStream
 
wrapRDD(RDD<T>) - Method in interface org.apache.spark.streaming.api.java.JavaDStreamLike
 
wrapRDD(RDD<T>) - Static method in class org.apache.spark.streaming.api.java.JavaInputDStream
 
wrapRDD(RDD<Tuple2<K, V>>) - Method in class org.apache.spark.streaming.api.java.JavaPairDStream
 
wrapRDD(RDD<Tuple2<K, V>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairInputDStream
 
wrapRDD(RDD<Tuple2<K, V>>) - Static method in class org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream
 
wrapRDD(RDD<T>) - Static method in class org.apache.spark.streaming.api.java.JavaReceiverInputDStream
 
write(int) - Method in class org.apache.spark.io.SnappyOutputStreamWrapper
 
write(byte[]) - Method in class org.apache.spark.io.SnappyOutputStreamWrapper
 
write(byte[], int, int) - Method in class org.apache.spark.io.SnappyOutputStreamWrapper
 
write() - Method in class org.apache.spark.ml.classification.DecisionTreeClassificationModel
 
write() - Static method in class org.apache.spark.ml.classification.DecisionTreeClassifier
 
write() - Method in class org.apache.spark.ml.classification.GBTClassificationModel
 
write() - Static method in class org.apache.spark.ml.classification.GBTClassifier
 
write() - Static method in class org.apache.spark.ml.classification.LogisticRegression
 
write() - Method in class org.apache.spark.ml.classification.LogisticRegressionModel
Returns a MLWriter instance for this ML instance.
write() - Method in class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
 
write() - Static method in class org.apache.spark.ml.classification.MultilayerPerceptronClassifier
 
write() - Static method in class org.apache.spark.ml.classification.NaiveBayes
 
write() - Method in class org.apache.spark.ml.classification.NaiveBayesModel
 
write() - Method in class org.apache.spark.ml.classification.OneVsRest
 
write() - Method in class org.apache.spark.ml.classification.OneVsRestModel
 
write() - Method in class org.apache.spark.ml.classification.RandomForestClassificationModel
 
write() - Static method in class org.apache.spark.ml.classification.RandomForestClassifier
 
write() - Static method in class org.apache.spark.ml.clustering.BisectingKMeans
 
write() - Method in class org.apache.spark.ml.clustering.BisectingKMeansModel
 
write() - Method in class org.apache.spark.ml.clustering.DistributedLDAModel
 
write() - Static method in class org.apache.spark.ml.clustering.GaussianMixture
 
write() - Method in class org.apache.spark.ml.clustering.GaussianMixtureModel
 
write() - Static method in class org.apache.spark.ml.clustering.KMeans
 
write() - Method in class org.apache.spark.ml.clustering.KMeansModel
 
write() - Static method in class org.apache.spark.ml.clustering.LDA
 
write() - Method in class org.apache.spark.ml.clustering.LocalLDAModel
 
write() - Static method in class org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
 
write() - Static method in class org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
 
write() - Static method in class org.apache.spark.ml.evaluation.RegressionEvaluator
 
write() - Static method in class org.apache.spark.ml.feature.Binarizer
 
write() - Static method in class org.apache.spark.ml.feature.Bucketizer
 
write() - Static method in class org.apache.spark.ml.feature.ChiSqSelector
 
write() - Method in class org.apache.spark.ml.feature.ChiSqSelectorModel
 
write() - Method in class org.apache.spark.ml.feature.ColumnPruner
 
write() - Static method in class org.apache.spark.ml.feature.CountVectorizer
 
write() - Method in class org.apache.spark.ml.feature.CountVectorizerModel
 
write() - Static method in class org.apache.spark.ml.feature.DCT
 
write() - Static method in class org.apache.spark.ml.feature.ElementwiseProduct
 
write() - Static method in class org.apache.spark.ml.feature.HashingTF
 
write() - Static method in class org.apache.spark.ml.feature.IDF
 
write() - Method in class org.apache.spark.ml.feature.IDFModel
 
write() - Static method in class org.apache.spark.ml.feature.IndexToString
 
write() - Static method in class org.apache.spark.ml.feature.Interaction
 
write() - Static method in class org.apache.spark.ml.feature.MaxAbsScaler
 
write() - Method in class org.apache.spark.ml.feature.MaxAbsScalerModel
 
write() - Static method in class org.apache.spark.ml.feature.MinMaxScaler
 
write() - Method in class org.apache.spark.ml.feature.MinMaxScalerModel
 
write() - Static method in class org.apache.spark.ml.feature.NGram
 
write() - Static method in class org.apache.spark.ml.feature.Normalizer
 
write() - Static method in class org.apache.spark.ml.feature.OneHotEncoder
 
write() - Static method in class org.apache.spark.ml.feature.PCA
 
write() - Method in class org.apache.spark.ml.feature.PCAModel
 
write() - Static method in class org.apache.spark.ml.feature.PolynomialExpansion
 
write() - Static method in class org.apache.spark.ml.feature.QuantileDiscretizer
 
write() - Static method in class org.apache.spark.ml.feature.RegexTokenizer
 
write() - Static method in class org.apache.spark.ml.feature.RFormula
 
write() - Method in class org.apache.spark.ml.feature.RFormulaModel
 
write() - Static method in class org.apache.spark.ml.feature.SQLTransformer
 
write() - Static method in class org.apache.spark.ml.feature.StandardScaler
 
write() - Method in class org.apache.spark.ml.feature.StandardScalerModel
 
write() - Static method in class org.apache.spark.ml.feature.StopWordsRemover
 
write() - Static method in class org.apache.spark.ml.feature.StringIndexer
 
write() - Method in class org.apache.spark.ml.feature.StringIndexerModel
 
write() - Static method in class org.apache.spark.ml.feature.Tokenizer
 
write() - Static method in class org.apache.spark.ml.feature.VectorAssembler
 
write() - Method in class org.apache.spark.ml.feature.VectorAttributeRewriter
 
write() - Static method in class org.apache.spark.ml.feature.VectorIndexer
 
write() - Method in class org.apache.spark.ml.feature.VectorIndexerModel
 
write() - Static method in class org.apache.spark.ml.feature.VectorSlicer
 
write() - Static method in class org.apache.spark.ml.feature.Word2Vec
 
write() - Method in class org.apache.spark.ml.feature.Word2VecModel
 
write() - Method in class org.apache.spark.ml.Pipeline
 
write() - Method in class org.apache.spark.ml.PipelineModel
 
write() - Static method in class org.apache.spark.ml.recommendation.ALS
 
write() - Method in class org.apache.spark.ml.recommendation.ALSModel
 
write() - Static method in class org.apache.spark.ml.regression.AFTSurvivalRegression
 
write() - Method in class org.apache.spark.ml.regression.AFTSurvivalRegressionModel
 
write() - Method in class org.apache.spark.ml.regression.DecisionTreeRegressionModel
 
write() - Static method in class org.apache.spark.ml.regression.DecisionTreeRegressor
 
write() - Method in class org.apache.spark.ml.regression.GBTRegressionModel
 
write() - Static method in class org.apache.spark.ml.regression.GBTRegressor
 
write() - Static method in class org.apache.spark.ml.regression.GeneralizedLinearRegression
 
write() - Method in class org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
 
write() - Static method in class org.apache.spark.ml.regression.IsotonicRegression
 
write() - Method in class org.apache.spark.ml.regression.IsotonicRegressionModel
 
write() - Static method in class org.apache.spark.ml.regression.LinearRegression
 
write() - Method in class org.apache.spark.ml.regression.LinearRegressionModel
Returns a MLWriter instance for this ML instance.
write() - Method in class org.apache.spark.ml.regression.RandomForestRegressionModel
 
write() - Static method in class org.apache.spark.ml.regression.RandomForestRegressor
 
write() - Method in class org.apache.spark.ml.tuning.CrossValidator
 
write() - Method in class org.apache.spark.ml.tuning.CrossValidatorModel
 
write() - Method in class org.apache.spark.ml.tuning.TrainValidationSplit
 
write() - Method in class org.apache.spark.ml.tuning.TrainValidationSplitModel
 
write() - Method in interface org.apache.spark.ml.util.MLWritable
Returns an MLWriter instance for this ML instance.
write(Kryo, Output, Iterable<?>) - Method in class org.apache.spark.serializer.JavaIterableWrapperSerializer
 
write() - Method in class org.apache.spark.sql.Dataset
:: Experimental :: Interface for saving the content of the Dataset out into external storage or streams.
write(int) - Method in class org.apache.spark.storage.memory.RedirectableOutputStream
 
write(byte[]) - Method in class org.apache.spark.storage.memory.RedirectableOutputStream
 
write(byte[], int, int) - Method in class org.apache.spark.storage.memory.RedirectableOutputStream
 
write(int) - Method in class org.apache.spark.storage.TimeTrackingOutputStream
 
write(byte[]) - Method in class org.apache.spark.storage.TimeTrackingOutputStream
 
write(byte[], int, int) - Method in class org.apache.spark.storage.TimeTrackingOutputStream
 
write(ByteBuffer, long) - Method in class org.apache.spark.streaming.util.WriteAheadLog
Write the record to the log and return a record handle, which contains all the information necessary to read back the written record.
WRITE_TIME() - Method in class org.apache.spark.InternalAccumulator.shuffleWrite$
 
WriteAheadLog - Class in org.apache.spark.streaming.util
:: DeveloperApi :: This abstract class represents a write ahead log (aka journal) that is used by Spark Streaming to save the received data (by receivers) and associated metadata to a reliable storage, so that they can be recovered after driver failures.
WriteAheadLog() - Constructor for class org.apache.spark.streaming.util.WriteAheadLog
 
WriteAheadLogRecordHandle - Class in org.apache.spark.streaming.util
:: DeveloperApi :: This abstract class represents a handle that refers to a record written in a WriteAheadLog.
WriteAheadLogRecordHandle() - Constructor for class org.apache.spark.streaming.util.WriteAheadLogRecordHandle
 
WriteAheadLogUtils - Class in org.apache.spark.streaming.util
A helper class with utility functions related to the WriteAheadLog interface
WriteAheadLogUtils() - Constructor for class org.apache.spark.streaming.util.WriteAheadLogUtils
 
writeAll(Iterator<T>, ClassTag<T>) - Method in class org.apache.spark.serializer.SerializationStream
 
writeBoolean(DataOutputStream, boolean) - Static method in class org.apache.spark.api.r.SerDe
 
writeBooleanArr(DataOutputStream, boolean[]) - Static method in class org.apache.spark.api.r.SerDe
 
writeByteBuffer(ByteBuffer, DataOutput) - Static method in class org.apache.spark.util.Utils
Primitive often used when writing ByteBuffer to DataOutput
writeByteBuffer(ByteBuffer, OutputStream) - Static method in class org.apache.spark.util.Utils
Primitive often used when writing a ByteBuffer to an OutputStream.
writeBytes(DataOutputStream, byte[]) - Static method in class org.apache.spark.api.r.SerDe
 
writeBytes() - Method in class org.apache.spark.status.api.v1.ShuffleWriteMetricDistributions
 
writeDate(DataOutputStream, Date) - Static method in class org.apache.spark.api.r.SerDe
 
writeDouble(DataOutputStream, double) - Static method in class org.apache.spark.api.r.SerDe
 
writeDoubleArr(DataOutputStream, double[]) - Static method in class org.apache.spark.api.r.SerDe
 
writeExternal(ObjectOutput) - Method in class org.apache.spark.serializer.JavaSerializer
 
writeExternal(ObjectOutput) - Method in class org.apache.spark.storage.BlockManagerId
 
writeExternal(ObjectOutput) - Method in class org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
 
writeExternal(ObjectOutput) - Method in class org.apache.spark.storage.StorageLevel
 
writeExternal(ObjectOutput, Map<CharSequence, CharSequence>, byte[]) - Static method in class org.apache.spark.streaming.flume.EventTransformer
 
writeExternal(ObjectOutput) - Method in class org.apache.spark.streaming.flume.SparkFlumeEvent
 
writeInt(DataOutputStream, int) - Static method in class org.apache.spark.api.r.SerDe
 
writeIntArr(DataOutputStream, int[]) - Static method in class org.apache.spark.api.r.SerDe
 
writeJObj(DataOutputStream, Object) - Static method in class org.apache.spark.api.r.SerDe
 
writeKey(T, ClassTag<T>) - Method in class org.apache.spark.serializer.SerializationStream
Writes the object representing the key of a key-value pair.
writeObject(DataOutputStream, Object) - Static method in class org.apache.spark.api.r.SerDe
 
writeObject(T, ClassTag<T>) - Method in class org.apache.spark.serializer.SerializationStream
The most general-purpose method to write an object.
writeRecords() - Method in class org.apache.spark.status.api.v1.ShuffleWriteMetricDistributions
 
writeReplace() - Method in class org.apache.spark.util.AccumulatorV2
 
writeSqlObject(DataOutputStream, Object) - Static method in class org.apache.spark.sql.api.r.SQLUtils
 
writeString(DataOutputStream, String) - Static method in class org.apache.spark.api.r.SerDe
 
writeStringArr(DataOutputStream, String[]) - Static method in class org.apache.spark.api.r.SerDe
 
writeTime(DataOutputStream, Time) - Static method in class org.apache.spark.api.r.SerDe
 
writeTime(DataOutputStream, Timestamp) - Static method in class org.apache.spark.api.r.SerDe
 
writeTime() - Method in class org.apache.spark.status.api.v1.ShuffleWriteMetricDistributions
 
writeTime() - Method in class org.apache.spark.status.api.v1.ShuffleWriteMetrics
 
writeTo(OutputStream) - Method in class org.apache.spark.util.sketch.BloomFilter
Writes out this BloomFilter to an output stream in binary format.
writeTo(OutputStream) - Method in class org.apache.spark.util.sketch.CountMinSketch
Writes out this CountMinSketch to an output stream in binary format.
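
For instance (a small sketch; the capacity and false-positive rate are arbitrary), a BloomFilter can be written out and read back with the companion readFrom method; CountMinSketch follows the same writeTo/readFrom pattern:

    import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
    import org.apache.spark.util.sketch.BloomFilter

    val filter = BloomFilter.create(1000, 0.03)     // expected items, false-positive probability
    (1L to 100L).foreach(filter.putLong)

    // Serialize to a binary stream ...
    val out = new ByteArrayOutputStream()
    filter.writeTo(out)

    // ... and restore an equivalent filter from it.
    val restored = BloomFilter.readFrom(new ByteArrayInputStream(out.toByteArray))
    assert(restored.mightContainLong(42L))
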
writeType(DataOutputStream, String) - Static method in class org.apache.spark.api.r.SerDe
 
writeValue(T, ClassTag<T>) - Method in class org.apache.spark.serializer.SerializationStream
Writes the object representing the value of a key-value pair.
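
A minimal sketch of how such a stream is obtained and used (JavaSerializer is just one concrete Serializer; everything here is illustrative):

    import java.io.ByteArrayOutputStream
    import org.apache.spark.SparkConf
    import org.apache.spark.serializer.JavaSerializer

    val out = new ByteArrayOutputStream()

    // A SerializationStream is obtained from a SerializerInstance and wraps an OutputStream.
    val stream = new JavaSerializer(new SparkConf()).newInstance().serializeStream(out)
    stream.writeObject("some value")   // writeKey/writeValue are used the same way for key-value pairs
    stream.close()
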

X

x() - Method in class org.apache.spark.mllib.optimization.NNLS.Workspace
 

Y

year(Column) - Static method in class org.apache.spark.sql.functions
Extracts the year as an integer from a given date/timestamp/string.
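
For example (a tiny sketch; the data is made up):

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.{col, year}

    val spark = SparkSession.builder().master("local[*]").appName("year-demo").getOrCreate()
    import spark.implicits._

    val df = Seq("2016-03-14", "2015-12-31").toDF("d")
    df.select(year(col("d")).as("yr")).show()   // yields 2016 and 2015
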

Z

zero() - Method in class org.apache.spark.Accumulable
Deprecated.
 
zero(R) - Method in interface org.apache.spark.AccumulableParam
Deprecated.
Return the "zero" (identity) value for an accumulator type, given its initial value.
zero() - Static method in class org.apache.spark.Accumulator
Deprecated.
 
zero(double) - Method in class org.apache.spark.AccumulatorParam.DoubleAccumulatorParam$
Deprecated.
 
zero(float) - Method in class org.apache.spark.AccumulatorParam.FloatAccumulatorParam$
Deprecated.
 
zero(int) - Method in class org.apache.spark.AccumulatorParam.IntAccumulatorParam$
Deprecated.
 
zero(long) - Method in class org.apache.spark.AccumulatorParam.LongAccumulatorParam$
Deprecated.
 
zero(String) - Method in class org.apache.spark.AccumulatorParam.StringAccumulatorParam$
Deprecated.
 
zero(int, int) - Static method in class org.apache.spark.mllib.clustering.ExpectationSum
 
zero() - Method in class org.apache.spark.sql.expressions.Aggregator
A zero value for this aggregation.
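
A brief sketch of where zero fits in a typed Aggregator (a hypothetical sum over a Dataset[Long]):

    import org.apache.spark.sql.{Encoder, Encoders, SparkSession}
    import org.apache.spark.sql.expressions.Aggregator

    // zero supplies the identity buffer that reduce() and merge() build on.
    object LongSum extends Aggregator[Long, Long, Long] {
      def zero: Long = 0L
      def reduce(buf: Long, a: Long): Long = buf + a
      def merge(b1: Long, b2: Long): Long = b1 + b2
      def finish(buf: Long): Long = buf
      def bufferEncoder: Encoder[Long] = Encoders.scalaLong
      def outputEncoder: Encoder[Long] = Encoders.scalaLong
    }

    val spark = SparkSession.builder().master("local[*]").appName("agg-demo").getOrCreate()
    import spark.implicits._
    val total = spark.range(1, 5).as[Long].select(LongSum.toColumn).first()   // 1 + 2 + 3 + 4 = 10
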
zeros(int, int) - Static method in class org.apache.spark.ml.linalg.DenseMatrix
Generate a DenseMatrix consisting of zeros.
zeros(int, int) - Static method in class org.apache.spark.ml.linalg.Matrices
Generate a Matrix consisting of zeros.
zeros(int) - Static method in class org.apache.spark.ml.linalg.Vectors
Creates a vector of all zeros.
zeros(int, int) - Static method in class org.apache.spark.mllib.linalg.DenseMatrix
Generate a DenseMatrix consisting of zeros.
zeros(int, int) - Static method in class org.apache.spark.mllib.linalg.Matrices
Generate a Matrix consisting of zeros.
zeros(int) - Static method in class org.apache.spark.mllib.linalg.Vectors
Creates a vector of all zeros.
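
For reference (a quick sketch using the ml.linalg factories; the mllib.linalg versions behave the same way):

    import org.apache.spark.ml.linalg.{DenseMatrix, Matrices, Vectors}

    val v = Vectors.zeros(3)          // dense vector [0.0, 0.0, 0.0]
    val m = Matrices.zeros(2, 3)      // 2 x 3 matrix of zeros
    val d = DenseMatrix.zeros(2, 2)   // explicitly a DenseMatrix
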
zip(JavaRDDLike<U, ?>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
zip(JavaRDDLike<U, ?>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
zip(JavaRDDLike<U, ?>) - Static method in class org.apache.spark.api.java.JavaRDD
 
zip(JavaRDDLike<U, ?>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Zips this RDD with another one, returning key-value pairs with the first element in each RDD, second element in each RDD, etc.
zip(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.api.r.RRDD
 
zip(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
zip(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
zip(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
zip(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.graphx.VertexRDD
 
zip(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
zip(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
zip(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
zip(RDD<U>, ClassTag<U>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
zip(RDD<U>, ClassTag<U>) - Method in class org.apache.spark.rdd.RDD
Zips this RDD with another one, returning key-value pairs with the first element in each RDD, second element in each RDD, etc.
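
A small sketch (sc is an assumed SparkContext); note that both RDDs are expected to have the same number of partitions and the same number of elements per partition:

    // sc: an existing SparkContext (e.g. SparkSession.builder.getOrCreate().sparkContext)
    val names = sc.parallelize(Seq("a", "b", "c"), 2)
    val nums  = sc.parallelize(Seq(1, 2, 3), 2)

    // Element-wise pairing; requires matching partitioning and element counts per partition.
    names.zip(nums).collect()   // Array(("a",1), ("b",2), ("c",3))
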
zip(GenIterable<B>, CanBuildFrom<Repr, Tuple2<A1, B>, That>) - Static method in class org.apache.spark.sql.types.StructType
 
zipAll(GenIterable<B>, A1, B, CanBuildFrom<Repr, Tuple2<A1, B>, That>) - Static method in class org.apache.spark.sql.types.StructType
 
zipPartitions(JavaRDDLike<U, ?>, FlatMapFunction2<Iterator<T>, Iterator<U>, V>) - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
zipPartitions(JavaRDDLike<U, ?>, FlatMapFunction2<Iterator<T>, Iterator<U>, V>) - Static method in class org.apache.spark.api.java.JavaPairRDD
 
zipPartitions(JavaRDDLike<U, ?>, FlatMapFunction2<Iterator<T>, Iterator<U>, V>) - Static method in class org.apache.spark.api.java.JavaRDD
 
zipPartitions(JavaRDDLike<U, ?>, FlatMapFunction2<Iterator<T>, Iterator<U>, V>) - Method in interface org.apache.spark.api.java.JavaRDDLike
Zip this RDD's partitions with one (or more) RDD(s) and return a new RDD by applying a function to the zipped partitions.
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.api.r.RRDD
 
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.api.r.RRDD
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.api.r.RRDD
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.api.r.RRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.api.r.RRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.api.r.RRDD
 
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.graphx.EdgeRDD
 
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.graphx.VertexRDD
 
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.graphx.VertexRDD
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.graphx.VertexRDD
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.graphx.VertexRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.graphx.VertexRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.graphx.VertexRDD
 
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.rdd.HadoopRDD
 
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.rdd.JdbcRDD
 
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
zipPartitions(RDD<B>, boolean, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Method in class org.apache.spark.rdd.RDD
Zip this RDD's partitions with one (or more) RDD(s) and return a new RDD by applying a function to the zipped partitions.
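
A small sketch (sc is an assumed SparkContext; the RDDs need the same number of partitions, though not the same element counts):

    val xs = sc.parallelize(1 to 6, 2)                  // partitions: [1,2,3] and [4,5,6]
    val ys = sc.parallelize(Seq("a", "b", "c"), 2)      // partitions: ["a"] and ["b","c"]

    // The function sees one iterator per RDD per partition and returns a new iterator.
    val summary = xs.zipPartitions(ys) { (nums, strs) =>
      Iterator.single((nums.size, strs.size))
    }
    summary.collect()   // Array((3,1), (3,2)) given the partitioning above
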
zipPartitions(RDD<B>, Function2<Iterator<T>, Iterator<B>, Iterator<V>>, ClassTag<B>, ClassTag<V>) - Method in class org.apache.spark.rdd.RDD
 
zipPartitions(RDD<B>, RDD<C>, boolean, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Method in class org.apache.spark.rdd.RDD
 
zipPartitions(RDD<B>, RDD<C>, Function3<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<V>) - Method in class org.apache.spark.rdd.RDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, boolean, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Method in class org.apache.spark.rdd.RDD
 
zipPartitions(RDD<B>, RDD<C>, RDD<D>, Function4<Iterator<T>, Iterator<B>, Iterator<C>, Iterator<D>, Iterator<V>>, ClassTag<B>, ClassTag<C>, ClassTag<D>, ClassTag<V>) - Method in class org.apache.spark.rdd.RDD
 
zipWithIndex() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
zipWithIndex() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
zipWithIndex() - Static method in class org.apache.spark.api.java.JavaRDD
 
zipWithIndex() - Method in interface org.apache.spark.api.java.JavaRDDLike
Zips this RDD with its element indices.
zipWithIndex() - Static method in class org.apache.spark.api.r.RRDD
 
zipWithIndex() - Static method in class org.apache.spark.graphx.EdgeRDD
 
zipWithIndex() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
zipWithIndex() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
zipWithIndex() - Static method in class org.apache.spark.graphx.VertexRDD
 
zipWithIndex() - Static method in class org.apache.spark.rdd.HadoopRDD
 
zipWithIndex() - Static method in class org.apache.spark.rdd.JdbcRDD
 
zipWithIndex() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
zipWithIndex() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
zipWithIndex() - Method in class org.apache.spark.rdd.RDD
Zips this RDD with its element indices.
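
A quick sketch (sc is an assumed SparkContext); indices follow partition order and then element order within each partition, and computing them triggers a job when there is more than one partition:

    val rdd = sc.parallelize(Seq("a", "b", "c"), 2)
    rdd.zipWithIndex().collect()   // Array(("a",0), ("b",1), ("c",2))
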
zipWithIndex(CanBuildFrom<Repr, Tuple2<A1, Object>, That>) - Static method in class org.apache.spark.sql.types.StructType
 
zipWithUniqueId() - Static method in class org.apache.spark.api.java.JavaDoubleRDD
 
zipWithUniqueId() - Static method in class org.apache.spark.api.java.JavaPairRDD
 
zipWithUniqueId() - Static method in class org.apache.spark.api.java.JavaRDD
 
zipWithUniqueId() - Method in interface org.apache.spark.api.java.JavaRDDLike
Zips this RDD with generated unique Long ids.
zipWithUniqueId() - Static method in class org.apache.spark.api.r.RRDD
 
zipWithUniqueId() - Static method in class org.apache.spark.graphx.EdgeRDD
 
zipWithUniqueId() - Static method in class org.apache.spark.graphx.impl.EdgeRDDImpl
 
zipWithUniqueId() - Static method in class org.apache.spark.graphx.impl.VertexRDDImpl
 
zipWithUniqueId() - Static method in class org.apache.spark.graphx.VertexRDD
 
zipWithUniqueId() - Static method in class org.apache.spark.rdd.HadoopRDD
 
zipWithUniqueId() - Static method in class org.apache.spark.rdd.JdbcRDD
 
zipWithUniqueId() - Static method in class org.apache.spark.rdd.NewHadoopRDD
 
zipWithUniqueId() - Static method in class org.apache.spark.rdd.PartitionPruningRDD
 
zipWithUniqueId() - Method in class org.apache.spark.rdd.RDD
Zips this RDD with generated unique Long ids.
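
For contrast with zipWithIndex (same assumed sc): the generated ids are unique but generally not contiguous, and no extra Spark job is needed to compute them:

    val rdd = sc.parallelize(Seq("a", "b", "c", "d"), 2)   // partitions: ["a","b"] and ["c","d"]
    rdd.zipWithUniqueId().collect()
    // Array(("a",0), ("b",2), ("c",1), ("d",3)) -- id = positionInPartition * numPartitions + partitionIndex
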

_

_1() - Method in class org.apache.spark.util.MutablePair
 
_2() - Method in class org.apache.spark.util.MutablePair
 
_sqlContext() - Method in class org.apache.spark.sql.SparkSession.implicits$
 
_sqlContext() - Method in class org.apache.spark.sql.SQLContext.implicits$
 
_sqlContext() - Method in class org.apache.spark.sql.SQLImplicits
 