public class SparkHiveWriterContainer extends Object implements Logging, SparkHadoopMapRedUtil, scala.Serializable
SparkHadoopWriter

| Constructor and Description |
| --- |
| SparkHiveWriterContainer(org.apache.hadoop.mapred.JobConf jobConf, ShimFileSinkDesc fileSinkConf) |
| Modifier and Type | Method and Description |
| --- | --- |
| void | close() |
| void | commitJob() |
| static org.apache.hadoop.fs.Path | createPathFromString(String path, org.apache.hadoop.mapred.JobConf conf) |
| void | driverSideSetup() |
| void | executorSideSetup(int jobId, int splitId, int attemptId) |
| org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter | getLocalFileWriter(org.apache.spark.sql.catalyst.expressions.Row row) |
Methods inherited from class java.lang.Object: equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Methods inherited from interface Logging: initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
Methods inherited from interface SparkHadoopMapRedUtil: firstAvailableClass, newJobContext, newTaskAttemptContext, newTaskAttemptID
public SparkHiveWriterContainer(org.apache.hadoop.mapred.JobConf jobConf, ShimFileSinkDesc fileSinkConf)
public static org.apache.hadoop.fs.Path createPathFromString(String path, org.apache.hadoop.mapred.JobConf conf)
public void driverSideSetup()
public void executorSideSetup(int jobId, int splitId, int attemptId)
public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getLocalFileWriter(org.apache.spark.sql.catalyst.expressions.Row row)
public void close()
public void commitJob()