public class Filter extends SparkPlan implements UnaryNode, scala.Product, scala.Serializable
| Constructor and Description |
|---|
| `Filter(org.apache.spark.sql.catalyst.expressions.Expression condition, SparkPlan child)` |
| Modifier and Type | Method and Description |
|---|---|
| `SparkPlan` | `child()` |
| `org.apache.spark.sql.catalyst.expressions.Expression` | `condition()` |
| `scala.Function1<org.apache.spark.sql.catalyst.expressions.Row,Object>` | `conditionEvaluator()` |
| `RDD<org.apache.spark.sql.catalyst.expressions.Row>` | `execute()` — Runs this query returning the result as an RDD. |
| `scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>` | `output()` |
Methods inherited from class SparkPlan: codegenEnabled, executeCollect, makeCopy, outputPartitioning, requiredChildDistribution

Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan: expressions, inputSet, missingInput, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, outputSet, printSchema, references, schema, schemaString, simpleString, statePrefix, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode: apply, argString, asCode, children, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, map, mapChildren, nodeName, numberedTreeString, otherCopyArgs, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren

Methods inherited from interface UnaryNode: outputPartitioning

Methods inherited from interface scala.Product: productArity, productElement, productIterator, productPrefix

Methods inherited from interface org.apache.spark.Logging: initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public Filter(org.apache.spark.sql.catalyst.expressions.Expression condition, SparkPlan child)
public org.apache.spark.sql.catalyst.expressions.Expression condition()
public SparkPlan child()
Specified by: `child` in interface `org.apache.spark.sql.catalyst.trees.UnaryNode<SparkPlan>`
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
Specified by: `output` in class `org.apache.spark.sql.catalyst.plans.QueryPlan<SparkPlan>`
public scala.Function1<org.apache.spark.sql.catalyst.expressions.Row,Object> conditionEvaluator()