public class EvaluatePython
extends org.apache.spark.sql.catalyst.plans.logical.UnaryNode
implements scala.Product, scala.Serializable
Evaluates a PythonUDF, appending the result to the end of the input tuple.

| Constructor and Description |
|---|
| `EvaluatePython(PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child, org.apache.spark.sql.catalyst.expressions.AttributeReference resultAttribute)` |
| Modifier and Type | Method and Description |
|---|---|
| `static EvaluatePython` | `apply(PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child)` |
| `org.apache.spark.sql.catalyst.plans.logical.LogicalPlan` | `child()` |
| `static Object` | `fromJava(Object obj, org.apache.spark.sql.catalyst.types.DataType dataType)` |
| `scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>` | `output()` |
| `org.apache.spark.sql.catalyst.expressions.AttributeReference` | `resultAttribute()` |
| `static Object[]` | `rowToArray(org.apache.spark.sql.catalyst.expressions.Row row, scala.collection.Seq<org.apache.spark.sql.catalyst.types.DataType> fields)` Converts a Row into a Java array (to be pickled into Python). |
| `static Object` | `toJava(Object obj, org.apache.spark.sql.catalyst.types.DataType dataType)` Helper for converting a Scala object to a Java object suitable for pyspark serialization. |
| `PythonUDF` | `udf()` |
Methods inherited from class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan:
childrenResolved, cleanArgs, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, org$apache$spark$Logging$$log__$eq, org$apache$spark$Logging$$log_, resolve, resolve, resolveChildren, resolved, sameResult, statePrefix, statistics

Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan:
expressions, inputSet, missingInput, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, outputSet, printSchema, references, schema, schemaString, simpleString, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, argString, asCode, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, makeCopy, map, mapChildren, nodeName, numberedTreeString, otherCopyArgs, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren

Methods inherited from interface scala.Product:
productArity, productElement, productIterator, productPrefix

Methods inherited from interface org.apache.spark.Logging:
initializeIfNecessary, initializeLogging, log_
public EvaluatePython(PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child, org.apache.spark.sql.catalyst.expressions.AttributeReference resultAttribute)
public static EvaluatePython apply(PythonUDF udf, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child)
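Since the two-argument `apply` takes no `resultAttribute`, it presumably allocates the result attribute itself before delegating to the three-argument constructor. A minimal Scala sketch of constructing the node this way; `udf` and `plan` are assumed to be an already-built `PythonUDF` (from this package) and a `LogicalPlan`, neither of which this page constructs:

```scala
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

// Sketch only: `udf` and `plan` are assumed to exist already;
// building a PythonUDF is out of scope here.
def appendUdfColumn(udf: PythonUDF, plan: LogicalPlan): EvaluatePython =
  EvaluatePython.apply(udf, plan)
```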
public static Object toJava(Object obj, org.apache.spark.sql.catalyst.types.DataType dataType)
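Per the summary above, this is a helper for converting a Scala object into a Java object that pyspark's pickler can handle. A hedged sketch using the pre-1.3 `org.apache.spark.sql.catalyst.types` DataTypes this page references; note that in Scala the declared return type is `Any`, which the Javadoc renders as `Object`:

```scala
import org.apache.spark.sql.catalyst.types.{ArrayType, IntegerType}

// Convert a Scala Seq into a pickle-friendly Java value for the
// ArrayType(IntegerType, ...) column it represents.
val javaFriendly =
  EvaluatePython.toJava(Seq(1, 2, 3), ArrayType(IntegerType, false))
```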
public static Object[] rowToArray(org.apache.spark.sql.catalyst.expressions.Row row, scala.collection.Seq<org.apache.spark.sql.catalyst.types.DataType> fields)
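As the summary notes, this flattens a Catalyst Row into a Java array so it can be pickled for Python. A sketch, using `GenericRow` (Catalyst's concrete Row) purely for illustration:

```scala
import org.apache.spark.sql.catalyst.expressions.GenericRow
import org.apache.spark.sql.catalyst.types.{DataType, IntegerType, StringType}

// A two-column row plus the matching DataType for each field.
val row = new GenericRow(Array[Any](1, "spark"))
val fieldTypes: Seq[DataType] = Seq(IntegerType, StringType)

// Each field is converted to a pickle-friendly value (presumably via the
// same machinery as toJava) and packed into an array ready for pickling.
val pickled = EvaluatePython.rowToArray(row, fieldTypes)
```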
public static Object fromJava(Object obj, org.apache.spark.sql.catalyst.types.DataType dataType)
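`fromJava` runs in the opposite direction: it maps a value coming back from the Python worker into the Catalyst representation described by `dataType`. A sketch of a round trip through both helpers; the exact set of conversions each type triggers is not documented on this page, so treat this as illustrative:

```scala
import org.apache.spark.sql.catalyst.types.IntegerType

// Round-trip a value: out to a pickle-friendly Java object, then back
// into Catalyst's internal representation.
val out  = EvaluatePython.toJava(42, IntegerType)
val back = EvaluatePython.fromJava(out, IntegerType)
```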
public PythonUDF udf()
public org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child()
Specified by:
`child` in interface `org.apache.spark.sql.catalyst.trees.UnaryNode<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>`
public org.apache.spark.sql.catalyst.expressions.AttributeReference resultAttribute()
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
Specified by:
`output` in class `org.apache.spark.sql.catalyst.plans.QueryPlan<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>`
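Consistent with the class description ("appending the result to the end of the input tuple"), one would expect `output()` to be the child's output with `resultAttribute()` appended. A hedged check, reusing the hypothetical `udf` and `plan` from the `apply` sketch above:

```scala
val node = EvaluatePython.apply(udf, plan)

// Expected shape of the output schema: every attribute of the child,
// then the generated UDF result attribute at the end.
assert(node.output == (node.child.output :+ node.resultAttribute))
```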