public class InsertIntoHiveTable extends SparkPlan implements UnaryNode, Command, HiveInspectors, scala.Product, scala.Serializable
Nested classes/interfaces inherited from interface org.apache.spark.sql.hive.HiveInspectors:
HiveInspectors.typeInfoConversions
| Constructor and Description |
|---|
| InsertIntoHiveTable(MetastoreRelation table, scala.collection.immutable.Map<String,scala.Option<String>> partition, SparkPlan child, boolean overwrite, HiveContext sc) |
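This operator is normally produced by the query planner rather than instantiated by user code. As a hedged sketch (assuming a Spark 1.x HiveContext and an existing source table src; the table names and app name are illustrative), an INSERT statement against a Hive table is what ultimately plans to this node:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

// Hypothetical driver program; the table `src` is assumed to already exist.
val sparkContext = new SparkContext(
  new SparkConf().setAppName("insert-into-hive-example").setMaster("local[*]"))
val hiveContext = new HiveContext(sparkContext)

hiveContext.sql("CREATE TABLE IF NOT EXISTS dst (key INT, value STRING)")
// Planning this statement yields an InsertIntoHiveTable node over the SELECT's plan.
hiveContext.sql("INSERT OVERWRITE TABLE dst SELECT key, value FROM src")
```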
| Modifier and Type | Method and Description |
|---|---|
| SparkPlan | child() |
| scala.collection.immutable.List<HiveContext> | otherCopyArgs() |
| scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | output() |
| Class<? extends org.apache.hadoop.io.Writable> | outputClass() |
| boolean | overwrite() |
| scala.collection.immutable.Map<String,scala.Option<String>> | partition() |
| void | saveAsHiveFile(RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd, Class<?> valueClass, ShimFileSinkDesc fileSinkConf, SerializableWritable<org.apache.hadoop.mapred.JobConf> conf, SparkHiveWriterContainer writerContainer) |
| MetastoreRelation | table() |
Methods inherited from class org.apache.spark.sql.execution.SparkPlan:
codegenEnabled, execute, executeCollect, makeCopy, outputPartitioning, requiredChildDistribution

Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan:
expressions, inputSet, missingInput, outputSet, printSchema, references, schema, schemaString, simpleString, statePrefix, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, argString, asCode, children, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, map, mapChildren, nodeName, numberedTreeString, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren

Methods inherited from interface org.apache.spark.sql.execution.UnaryNode:
outputPartitioning

Methods inherited from interface org.apache.spark.sql.execution.Command:
execute, executeCollect

Methods inherited from interface org.apache.spark.sql.hive.HiveInspectors:
inspectorToDataType, javaClassToDataType, toInspector, toInspector, unwrap, wrap, wrap, wrapperFor

Methods inherited from interface scala.Product:
productArity, productElement, productIterator, productPrefix

Methods inherited from interface org.apache.spark.Logging:
initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public InsertIntoHiveTable(MetastoreRelation table, scala.collection.immutable.Map<String,scala.Option<String>> partition, SparkPlan child, boolean overwrite, HiveContext sc)
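Because the class implements scala.Product it is a Scala case class, so it can also be built via its companion apply method. The following is an illustrative sketch only, since the planner normally constructs this node; relation (a MetastoreRelation), childPlan (the SparkPlan producing the rows to insert), and hiveContext are assumed to already be in scope:

```scala
// Illustrative only: `relation`, `childPlan`, and `hiveContext` are assumed
// to exist; the query planner normally constructs this node.
val insert = InsertIntoHiveTable(
  table = relation,
  partition = Map("ds" -> Some("2014-01-01")), // static partition spec
  child = childPlan,
  overwrite = true, // replace existing table/partition contents
  sc = hiveContext)
```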
public MetastoreRelation table()
public scala.collection.immutable.Map<String,scala.Option<String>> partition()
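The map's values are options: presumably Some(value) fixes a static partition column, while None leaves that column to be filled per-row via dynamic partitioning. A minimal sketch of such a spec, assuming hypothetical partition columns ds and country:

```scala
// Assumed semantics: Some(...) = static partition value, None = dynamic.
val partitionSpec: Map[String, Option[String]] =
  Map("ds" -> Some("2014-01-01"), "country" -> None)
```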
public SparkPlan child()
Specified by: child in interface org.apache.spark.sql.catalyst.trees.UnaryNode<SparkPlan>
public boolean overwrite()
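The flag mirrors the HiveQL statement the plan came from; assuming the usual mapping:

```scala
// INSERT INTO appends; INSERT OVERWRITE replaces existing contents.
hiveContext.sql("INSERT INTO TABLE dst SELECT * FROM src")      // overwrite() == false
hiveContext.sql("INSERT OVERWRITE TABLE dst SELECT * FROM src") // overwrite() == true
```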
public Class<? extends org.apache.hadoop.io.Writable> outputClass()
public scala.collection.immutable.List<HiveContext> otherCopyArgs()
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
Specified by: output in class org.apache.spark.sql.catalyst.plans.QueryPlan<SparkPlan>
public void saveAsHiveFile(RDD<org.apache.spark.sql.catalyst.expressions.Row> rdd, Class<?> valueClass, ShimFileSinkDesc fileSinkConf, SerializableWritable<org.apache.hadoop.mapred.JobConf> conf, SparkHiveWriterContainer writerContainer)
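Note that the JobConf argument arrives wrapped in SerializableWritable, which is how Spark ships a Hadoop configuration to executors. A minimal sketch of that wrapping (the JobConf contents here are placeholders):

```scala
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.SerializableWritable

// Wrap the Hadoop JobConf so it can be serialized with the task closure.
val jobConf = new JobConf()
val serializableConf = new SerializableWritable(jobConf)
```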