public class TestHiveContext extends LocalHiveContext
Data from testTables will be automatically loaded whenever a query is run over those tables.
Calling reset
will delete all tables and other state in the database, leaving the database
in a "clean" state.
TestHive is the singleton object version of this class because instantiating multiple copies of the Hive metastore seems to lead to weird non-deterministic failures. Therefore, the execution of test cases that rely on TestHive must be serialized.
Modifier and Type | Class and Description |
---|---|
class |
TestHiveContext.QueryExecution
Override QueryExecution with special debug workflow.
|
class |
TestHiveContext.TestTable |
Constructor and Description |
---|
TestHiveContext(SparkContext sc) |
Modifier and Type | Method and Description |
---|---|
boolean |
cacheTables() |
scala.util.matching.Regex |
describedTable() |
TestHiveContext.QueryExecution |
executePlan(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan) |
java.io.File |
getHiveFile(String path) |
scala.Option<java.io.File> |
hiveDevHome()
The location of the hive source code.
|
java.io.File |
hiveFilesTemp() |
scala.Option<java.io.File> |
hiveHome()
The location of the compiled Hive distribution.
|
scala.collection.Seq<TestHiveContext.TestTable> |
hiveQTestUtilTables() |
java.io.File |
inRepoTests() |
void |
loadTestTable(String name) |
String |
metastorePath() |
scala.collection.mutable.HashMap<String,TestHiveContext.TestTable> |
registerTestTable(TestHiveContext.TestTable testTable) |
void |
reset()
Resets the test instance by deleting any tables that have been created.
|
scala.collection.Seq<String> |
runSqlHive(String sql)
Runs the specified SQL query using Hive.
|
scala.collection.mutable.HashMap<String,TestHiveContext.TestTable> |
testTables()
A list of test tables and the DDL required to initialize them.
|
String |
warehousePath() |
createTable, hivePlanner, hiveql, hql, set
cacheTable, createParquetFile, createSchemaRDD, isCached, jsonFile, jsonFile, jsonRDD, jsonRDD, logicalPlanToSparkQuery, parquetFile, registerRDDAsTable, sparkContext, sql, table, uncacheTable
public TestHiveContext(SparkContext sc)
public String warehousePath()
warehousePath
in class LocalHiveContext
public String metastorePath()
metastorePath
in class LocalHiveContext
public scala.Option<java.io.File> hiveHome()
public scala.Option<java.io.File> hiveDevHome()
public scala.collection.Seq<String> runSqlHive(String sql)
HiveContext
public TestHiveContext.QueryExecution executePlan(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan)
public java.io.File hiveFilesTemp()
public java.io.File inRepoTests()
public java.io.File getHiveFile(String path)
public scala.util.matching.Regex describedTable()
public scala.collection.mutable.HashMap<String,TestHiveContext.TestTable> testTables()
public scala.collection.mutable.HashMap<String,TestHiveContext.TestTable> registerTestTable(TestHiveContext.TestTable testTable)
public scala.collection.Seq<TestHiveContext.TestTable> hiveQTestUtilTables()
public boolean cacheTables()
public void loadTestTable(String name)
public void reset()