public class ExistingRdd extends SparkPlan implements scala.Product, scala.Serializable
| Constructor and Description |
|---|
| `ExistingRdd(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd)` |
| Modifier and Type | Method and Description |
|---|---|
| `RDD<org.apache.spark.sql.catalyst.expressions.Row>` | `execute()` — Runs this query returning the result as an RDD. |
| `static <A extends scala.Product> ExistingRdd` | `fromProductRdd(RDD<A> productRdd, scala.reflect.api.TypeTags.TypeTag<A> evidence$1)` |
| `scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute>` | `output()` |
| `static <A extends scala.Product> RDD<org.apache.spark.sql.catalyst.expressions.Row>` | `productToRowRdd(RDD<A> data)` |
| `RDD<org.apache.spark.sql.catalyst.expressions.Row>` | `rdd()` |
Methods inherited from class SparkPlan: codegenEnabled, executeCollect, makeCopy, outputPartitioning, requiredChildDistribution
Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan: expressions, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, outputSet, printSchema, schema, schemaString, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp
Methods inherited from the tree-node base class (presumably catalyst TreeNode — confirm against the class hierarchy): apply, argString, asCode, children, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, id, map, mapChildren, nextId, nodeName, numberedTreeString, otherCopyArgs, sameInstance, simpleString, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren
Methods inherited from interface scala.Product: productArity, productElement, productIterator, productPrefix
Members inherited from the logging trait (presumably org.apache.spark.Logging — confirm): initialized, initializeIfNecessary, initializeLogging, initLock, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public ExistingRdd(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd)
public static <A extends scala.Product> RDD<org.apache.spark.sql.catalyst.expressions.Row> productToRowRdd(RDD<A> data)
public static <A extends scala.Product> ExistingRdd fromProductRdd(RDD<A> productRdd, scala.reflect.api.TypeTags.TypeTag<A> evidence$1)
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
Overrides: `output` in class `org.apache.spark.sql.catalyst.plans.QueryPlan<SparkPlan>`
public RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd()