public interface SaveAsHiveFile
extends org.apache.spark.sql.execution.command.DataWritingCommand
| Modifier and Type | Method and Description |
|---|---|
| scala.Option<org.apache.hadoop.fs.Path> | createdTempDir() |
| void | deleteExternalTmpPath(org.apache.hadoop.conf.Configuration hadoopConf) |
| String | executionId() |
| org.apache.hadoop.fs.Path | getExternalScratchDir(java.net.URI extURI, org.apache.hadoop.conf.Configuration hadoopConf, String stagingDir) |
| org.apache.hadoop.fs.Path | getExternalTmpPath(SparkSession sparkSession, org.apache.hadoop.conf.Configuration hadoopConf, org.apache.hadoop.fs.Path path) |
| org.apache.hadoop.fs.Path | getExtTmpPathRelTo(org.apache.hadoop.fs.Path path, org.apache.hadoop.conf.Configuration hadoopConf, String stagingDir) |
| org.apache.hadoop.fs.Path | getStagingDir(org.apache.hadoop.fs.Path inputPath, org.apache.hadoop.conf.Configuration hadoopConf, String stagingDir) |
| org.apache.hadoop.fs.Path | newVersionExternalTempPath(org.apache.hadoop.fs.Path path, org.apache.hadoop.conf.Configuration hadoopConf, String stagingDir) |
| org.apache.hadoop.fs.Path | oldVersionExternalTempPath(org.apache.hadoop.fs.Path path, org.apache.hadoop.conf.Configuration hadoopConf, String scratchDir) |
| scala.collection.immutable.Set<String> | saveAsHiveFile(SparkSession sparkSession, org.apache.spark.sql.execution.SparkPlan plan, org.apache.hadoop.conf.Configuration hadoopConf, org.apache.spark.sql.hive.HiveShim.ShimFileSinkDesc fileSinkConf, String outputLocation, scala.collection.immutable.Map<scala.collection.immutable.Map<String,String>,String> customPartitionLocations, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> partitionAttributes) |
scala.Option<org.apache.hadoop.fs.Path> createdTempDir()
void deleteExternalTmpPath(org.apache.hadoop.conf.Configuration hadoopConf)
String executionId()
org.apache.hadoop.fs.Path getExtTmpPathRelTo(org.apache.hadoop.fs.Path path, org.apache.hadoop.conf.Configuration hadoopConf, String stagingDir)
org.apache.hadoop.fs.Path getExternalScratchDir(java.net.URI extURI, org.apache.hadoop.conf.Configuration hadoopConf, String stagingDir)
org.apache.hadoop.fs.Path getExternalTmpPath(SparkSession sparkSession, org.apache.hadoop.conf.Configuration hadoopConf, org.apache.hadoop.fs.Path path)
org.apache.hadoop.fs.Path getStagingDir(org.apache.hadoop.fs.Path inputPath, org.apache.hadoop.conf.Configuration hadoopConf, String stagingDir)
org.apache.hadoop.fs.Path newVersionExternalTempPath(org.apache.hadoop.fs.Path path, org.apache.hadoop.conf.Configuration hadoopConf, String stagingDir)
org.apache.hadoop.fs.Path oldVersionExternalTempPath(org.apache.hadoop.fs.Path path, org.apache.hadoop.conf.Configuration hadoopConf, String scratchDir)
scala.collection.immutable.Set<String> saveAsHiveFile(SparkSession sparkSession, org.apache.spark.sql.execution.SparkPlan plan, org.apache.hadoop.conf.Configuration hadoopConf, org.apache.spark.sql.hive.HiveShim.ShimFileSinkDesc fileSinkConf, String outputLocation, scala.collection.immutable.Map<scala.collection.immutable.Map<String,String>,String> customPartitionLocations, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> partitionAttributes)