public class ExistingRdd extends SparkPlan implements scala.Product, scala.Serializable
| Constructor and Description |
|---|
| ExistingRdd(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd) |
| Modifier and Type | Method and Description |
|---|---|
| static Object | convertToCatalyst(Object a) |
| RDD<org.apache.spark.sql.catalyst.expressions.Row> | execute() — Runs this query returning the result as an RDD. |
| static <A extends scala.Product> ExistingRdd | fromProductRdd(RDD<A> productRdd, scala.reflect.api.TypeTags.TypeTag<A> evidence$1) |
| scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | output() |
| static <A extends scala.Product> RDD<org.apache.spark.sql.catalyst.expressions.Row> | productToRowRdd(RDD<A> data) |
| RDD<org.apache.spark.sql.catalyst.expressions.Row> | rdd() |
Methods inherited from class org.apache.spark.sql.execution.SparkPlan: executeCollect, outputPartitioning, requiredChildDistribution
Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan: expressions, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, outputSet, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp
Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode: apply, argString, asCode, children, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, id, makeCopy, map, mapChildren, nextId, nodeName, numberedTreeString, otherCopyArgs, sameInstance, simpleString, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren
public ExistingRdd(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd)
public static Object convertToCatalyst(Object a)
public static <A extends scala.Product> RDD<org.apache.spark.sql.catalyst.expressions.Row> productToRowRdd(RDD<A> data)
public static <A extends scala.Product> ExistingRdd fromProductRdd(RDD<A> productRdd, scala.reflect.api.TypeTags.TypeTag<A> evidence$1)
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
Overrides: output in class org.apache.spark.sql.catalyst.plans.QueryPlan<SparkPlan>
public RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd()