diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
index 9618ff6062635..105a7a28441cc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
@@ -40,9 +40,13 @@ object StaticSQLConf {
     .internal()
     .version("2.0.0")
     .stringConf
-    .checkValues(Set("hive", "in-memory"))
     .createWithDefault("in-memory")
 
+  val SESSION_STATE_IMPLEMENTATION = buildStaticConf("spark.sql.sessionStateImplementation")
+    .internal()
+    .stringConf
+    .createWithDefault(CATALOG_IMPLEMENTATION.defaultValueString)
+
   val GLOBAL_TEMP_DATABASE = buildStaticConf("spark.sql.globalTempDatabase")
     .internal()
     .version("2.1.0")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index fc941270e1b64..27f0a5b10bd83 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -43,7 +43,7 @@ import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.command.ExternalCommandExecutor
 import org.apache.spark.sql.execution.datasources.{DataSource, LogicalRelation}
 import org.apache.spark.sql.internal._
-import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
+import org.apache.spark.sql.internal.StaticSQLConf.{CATALOG_IMPLEMENTATION, SESSION_STATE_IMPLEMENTATION}
 import org.apache.spark.sql.sources.BaseRelation
 import org.apache.spark.sql.streaming._
 import org.apache.spark.sql.types.{DataType, StructType}
@@ -866,6 +866,7 @@ object SparkSession extends Logging {
     */
    def enableHiveSupport(): Builder = synchronized {
      if (hiveClassesArePresent) {
+       config(SESSION_STATE_IMPLEMENTATION.key, "hive")
        config(CATALOG_IMPLEMENTATION.key, "hive")
      } else {
        throw new IllegalArgumentException(
@@ -1083,9 +1084,10 @@ object SparkSession extends Logging {
     "org.apache.spark.sql.hive.HiveSessionStateBuilder"
 
   private def sessionStateClassName(conf: SparkConf): String = {
-    conf.get(CATALOG_IMPLEMENTATION) match {
+    conf.get(SESSION_STATE_IMPLEMENTATION) match {
       case "hive" => HIVE_SESSION_STATE_BUILDER_CLASS_NAME
       case "in-memory" => classOf[SessionStateBuilder].getCanonicalName
+      case builder => builder
     }
   }
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
index ce4385d88f1e9..4b4ccabc46d64 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
@@ -197,6 +197,7 @@ object SharedState extends Logging {
     conf.get(CATALOG_IMPLEMENTATION) match {
       case "hive" => HIVE_EXTERNAL_CATALOG_CLASS_NAME
       case "in-memory" => classOf[InMemoryCatalog].getCanonicalName
+      case name => name
     }
   }
 