public class CreateTempTableUsing
extends org.apache.spark.sql.catalyst.plans.logical.Command
implements org.apache.spark.sql.execution.RunnableCommand, scala.Product, scala.Serializable
| Constructor and Description |
| --- |
| `CreateTempTableUsing(String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, scala.collection.immutable.Map<String,String> options)` |
| Modifier and Type | Method and Description |
| --- | --- |
| `scala.collection.immutable.Map<String,String>` | `options()` |
| `String` | `provider()` |
| `scala.collection.Seq<scala.runtime.Nothing$>` | `run(SQLContext sqlContext)` |
| `String` | `tableName()` |
| `scala.Option<org.apache.spark.sql.types.StructType>` | `userSpecifiedSchema()` |
Methods inherited from class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan (and the mixed-in Logging trait):
childrenResolved, cleanArgs, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, org$apache$spark$Logging$$log__$eq, org$apache$spark$Logging$$log_, org$apache$spark$sql$catalyst$plans$logical$LogicalPlan$$resolveAsColumn, org$apache$spark$sql$catalyst$plans$logical$LogicalPlan$$resolveAsTableColumn, resolve, resolve, resolveChildren, resolved, sameResult, statePrefix, statistics
Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan:
expressions, inputSet, missingInput, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, outputSet, printSchema, references, schema, schemaString, simpleString, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp
Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, argString, asCode, collect, fastEquals, flatMap, foreach, foreachUp, generateTreeString, getNodeNumbered, makeCopy, map, mapChildren, nodeName, numberedTreeString, origin, otherCopyArgs, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren
Methods inherited from interface scala.Product:
productArity, productElement, productIterator, productPrefix
Methods inherited from interface org.apache.spark.Logging:
initializeIfNecessary, initializeLogging, log_
public CreateTempTableUsing(String tableName, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, String provider, scala.collection.immutable.Map<String,String> options)
public String tableName()
public scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema()
public String provider()
public scala.collection.immutable.Map<String,String> options()
public scala.collection.Seq<scala.runtime.Nothing$> run(SQLContext sqlContext)
Specified by: `run` in interface `org.apache.spark.sql.execution.RunnableCommand`