public class EvaluatePython
extends org.apache.spark.sql.catalyst.plans.logical.UnaryNode
implements scala.Product, scala.Serializable
Evaluates a PythonUDF, appending the result to the end of the input tuple.

Constructor and Description |
---|
EvaluatePython(org.apache.spark.sql.execution.PythonUDF udf,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child) |
Modifier and Type | Method and Description |
---|---|
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan |
child() |
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> |
output() |
org.apache.spark.sql.catalyst.expressions.AttributeReference |
resultAttribute() |
org.apache.spark.sql.execution.PythonUDF |
udf() |
childrenResolved, inputSet, resolve, resolve, resolveChildren, resolved, statistics, Statistics
expressions, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, outputSet, printSchema, schema, schemaString, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp
apply, argString, asCode, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, id, makeCopy, map, mapChildren, nextId, nodeName, numberedTreeString, otherCopyArgs, sameInstance, simpleString, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren
public EvaluatePython(org.apache.spark.sql.execution.PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child)
public org.apache.spark.sql.execution.PythonUDF udf()
public org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child()
child
in interface org.apache.spark.sql.catalyst.trees.UnaryNode<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>
public org.apache.spark.sql.catalyst.expressions.AttributeReference resultAttribute()
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
output
in class org.apache.spark.sql.catalyst.plans.QueryPlan<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>