diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/GlobalTempViewManager.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/GlobalTempViewManager.scala
index f351993eb1b7a..aeeedebe330dd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/GlobalTempViewManager.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/GlobalTempViewManager.scala
@@ -35,7 +35,7 @@ import org.apache.spark.sql.errors.QueryCompilationErrors
  *
  * @param database The system preserved virtual database that keeps all the global temporary views.
  */
-class GlobalTempViewManager(val database: String) {
+class GlobalTempViewManager(database: String) {
 
   /** List of view definitions, mapping from view name to logical plan. */
   @GuardedBy("this")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index dbf2102a183ad..7a19f276b513b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -123,6 +123,7 @@ class SessionCatalog(
 
   lazy val externalCatalog = externalCatalogBuilder()
   lazy val globalTempViewManager = globalTempViewManagerBuilder()
+  val globalTempDatabase: String = SQLConf.get.globalTempDatabase
 
   /** List of temporary views, mapping from table name to their logical plan. */
   @GuardedBy("this")
@@ -273,9 +274,9 @@ class SessionCatalog(
 
   def createDatabase(dbDefinition: CatalogDatabase, ignoreIfExists: Boolean): Unit = {
     val dbName = format(dbDefinition.name)
-    if (dbName == globalTempViewManager.database) {
+    if (dbName == globalTempDatabase) {
       throw QueryCompilationErrors.cannotCreateDatabaseWithSameNameAsPreservedDatabaseError(
-        globalTempViewManager.database)
+        globalTempDatabase)
     }
     validateName(dbName)
     externalCatalog.createDatabase(
@@ -333,9 +334,9 @@ class SessionCatalog(
 
   def setCurrentDatabase(db: String): Unit = {
     val dbName = format(db)
-    if (dbName == globalTempViewManager.database) {
+    if (dbName == globalTempDatabase) {
       throw QueryCompilationErrors.cannotUsePreservedDatabaseAsCurrentDatabaseError(
-        globalTempViewManager.database)
+        globalTempDatabase)
     }
     requireDbExists(dbName)
     synchronized { currentDb = dbName }
@@ -659,7 +660,7 @@ class SessionCatalog(
       } else {
         false
       }
-    } else if (format(name.database.get) == globalTempViewManager.database) {
+    } else if (format(name.database.get) == globalTempDatabase) {
       globalTempViewManager.update(viewName, viewDefinition)
     } else {
       false
     }
@@ -767,9 +768,9 @@ class SessionCatalog(
     val table = format(name.table)
     if (name.database.isEmpty) {
       tempViews.get(table).map(_.tableMeta).getOrElse(getTableMetadata(name))
-    } else if (format(name.database.get) == globalTempViewManager.database) {
+    } else if (format(name.database.get) == globalTempDatabase) {
       globalTempViewManager.get(table).map(_.tableMeta)
-        .getOrElse(throw new NoSuchTableException(globalTempViewManager.database, table))
+        .getOrElse(throw new NoSuchTableException(globalTempDatabase, table))
     } else {
       getTableMetadata(name)
     }
@@ -795,7 +796,7 @@ class SessionCatalog(
 
     val oldTableName = qualifiedIdent.table
     val newTableName = format(newName.table)
-    if (db == globalTempViewManager.database) {
+    if (db == globalTempDatabase) {
       globalTempViewManager.rename(oldTableName, newTableName)
     } else {
       requireDbExists(db)
@@ -832,10 +833,10 @@ class SessionCatalog(
     val qualifiedIdent = qualifyIdentifier(name)
     val db = qualifiedIdent.database.get
     val table = qualifiedIdent.table
-    if (db == globalTempViewManager.database) {
+    if (db == globalTempDatabase) {
       val viewExists = globalTempViewManager.remove(table)
       if (!viewExists && !ignoreIfNotExists) {
-        throw new NoSuchTableException(globalTempViewManager.database, table)
+        throw new NoSuchTableException(globalTempDatabase, table)
       }
     } else {
       if (name.database.isDefined || !tempViews.contains(table)) {
@@ -873,7 +874,7 @@ class SessionCatalog(
     val qualifiedIdent = qualifyIdentifier(name)
     val db = qualifiedIdent.database.get
     val table = qualifiedIdent.table
-    if (db == globalTempViewManager.database) {
+    if (db == globalTempDatabase) {
       globalTempViewManager.get(table).map { viewDef =>
         SubqueryAlias(table, db, getTempViewPlan(viewDef))
       }.getOrElse(throw new NoSuchTableException(db, table))
@@ -1026,7 +1027,7 @@ class SessionCatalog(
   }
 
   def isGlobalTempViewDB(dbName: String): Boolean = {
-    globalTempViewManager.database.equalsIgnoreCase(dbName)
+    globalTempDatabase.equalsIgnoreCase(dbName)
   }
 
   /**
@@ -1085,9 +1086,9 @@ class SessionCatalog(
       pattern: String,
       includeLocalTempViews: Boolean): Seq[TableIdentifier] = {
     val dbName = format(db)
-    val dbTables = if (dbName == globalTempViewManager.database) {
+    val dbTables = if (dbName == globalTempDatabase) {
       globalTempViewManager.listViewNames(pattern).map { name =>
-        TableIdentifier(name, Some(globalTempViewManager.database))
+        TableIdentifier(name, Some(globalTempDatabase))
       }
     } else {
       requireDbExists(dbName)
@@ -1108,9 +1109,9 @@
    */
   def listViews(db: String, pattern: String): Seq[TableIdentifier] = {
     val dbName = format(db)
-    val dbViews = if (dbName == globalTempViewManager.database) {
+    val dbViews = if (dbName == globalTempDatabase) {
       globalTempViewManager.listViewNames(pattern).map { name =>
-        TableIdentifier(name, Some(globalTempViewManager.database))
+        TableIdentifier(name, Some(globalTempDatabase))
       }
     } else {
       requireDbExists(dbName)
@@ -1126,7 +1127,7 @@
    * List all matching temp views in the specified database, including global/local temporary views.
    */
  def listTempViews(db: String, pattern: String): Seq[CatalogTable] = {
-    val globalTempViews = if (format(db) == globalTempViewManager.database) {
+    val globalTempViews = if (format(db) == globalTempDatabase) {
       globalTempViewManager.listViewNames(pattern).flatMap { viewName =>
         globalTempViewManager.get(viewName).map(_.tableMeta)
       }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index f4751f2027894..ade0ba52cf9eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -5949,6 +5949,8 @@
 
   def defaultDatabase: String = getConf(StaticSQLConf.CATALOG_DEFAULT_DATABASE)
 
+  def globalTempDatabase: String = getConf(StaticSQLConf.GLOBAL_TEMP_DATABASE)
+
   def allowsTempViewCreationWithMultipleNameparts: Boolean =
     getConf(SQLConf.ALLOW_TEMP_VIEW_CREATION_WITH_MULTIPLE_NAME_PARTS)
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index e9a60ff17fc1b..48f829548bb65 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -933,17 +933,17 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
       createTempView(catalog, "temp_view4", tempTable, overrideIfExists = false)
       createGlobalTempView(catalog, "global_temp_view1", tempTable, overrideIfExists = false)
       createGlobalTempView(catalog, "global_temp_view2", tempTable, overrideIfExists = false)
-      assert(catalog.listTables(catalog.globalTempViewManager.database, "*").toSet ==
+      assert(catalog.listTables(catalog.globalTempDatabase, "*").toSet ==
         Set(TableIdentifier("temp_view1"), TableIdentifier("temp_view4"),
-          TableIdentifier("global_temp_view1", Some(catalog.globalTempViewManager.database)),
-          TableIdentifier("global_temp_view2", Some(catalog.globalTempViewManager.database))))
-      assert(catalog.listTables(catalog.globalTempViewManager.database, "*temp_view1").toSet ==
+          TableIdentifier("global_temp_view1", Some(catalog.globalTempDatabase)),
+          TableIdentifier("global_temp_view2", Some(catalog.globalTempDatabase))))
+      assert(catalog.listTables(catalog.globalTempDatabase, "*temp_view1").toSet ==
         Set(TableIdentifier("temp_view1"),
-          TableIdentifier("global_temp_view1", Some(catalog.globalTempViewManager.database))))
-      assert(catalog.listTables(catalog.globalTempViewManager.database, "global*").toSet ==
-        Set(TableIdentifier("global_temp_view1", Some(catalog.globalTempViewManager.database)),
-          TableIdentifier("global_temp_view2", Some(catalog.globalTempViewManager.database))))
+          TableIdentifier("global_temp_view1", Some(catalog.globalTempDatabase))))
+      assert(catalog.listTables(catalog.globalTempDatabase, "global*").toSet ==
+        Set(TableIdentifier("global_temp_view1", Some(catalog.globalTempDatabase)),
+          TableIdentifier("global_temp_view2", Some(catalog.globalTempDatabase))))
     }
   }
 
@@ -1906,9 +1906,9 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
       assert(catalog.getCachedTable(qualifiedName1) != null)
 
       createGlobalTempView(catalog, "tbl2", Range(2, 10, 1, 10), false)
-      val qualifiedName2 = QualifiedTableName(catalog.globalTempViewManager.database, "tbl2")
+      val qualifiedName2 = QualifiedTableName(catalog.globalTempDatabase, "tbl2")
       catalog.cacheTable(qualifiedName2, Range(2, 10, 1, 10))
-      catalog.refreshTable(TableIdentifier("tbl2", Some(catalog.globalTempViewManager.database)))
+      catalog.refreshTable(TableIdentifier("tbl2", Some(catalog.globalTempDatabase)))
       assert(catalog.getCachedTable(qualifiedName2) != null)
     }
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala
index 7b0ce3e59263f..65a7a0ebbd916 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala
@@ -42,7 +42,7 @@ case class AnalyzeColumnCommand(
     val sessionState = sparkSession.sessionState
 
     tableIdent.database match {
-      case Some(db) if db == sparkSession.sharedState.globalTempViewManager.database =>
+      case Some(db) if db == sparkSession.sharedState.globalTempDB =>
         val plan = sessionState.catalog.getGlobalTempView(tableIdent.identifier).getOrElse {
           throw QueryCompilationErrors.noSuchTableError(db, tableIdent.identifier)
         }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
index 88fd72a3cb7a7..2b1451493398f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
@@ -169,11 +169,12 @@ private[sql] class SharedState(
     wrapped
   }
 
+  val globalTempDB = conf.get(GLOBAL_TEMP_DATABASE)
+
   /**
    * A manager for global temporary views.
    */
   lazy val globalTempViewManager: GlobalTempViewManager = {
-    val globalTempDB = conf.get(GLOBAL_TEMP_DATABASE)
     if (externalCatalog.databaseExists(globalTempDB)) {
       throw QueryExecutionErrors.databaseNameConflictWithSystemPreservedDatabaseError(globalTempDB)
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index d023fb82185a8..71b420bb85eac 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -1436,7 +1436,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils
       withSQLConf(SQLConf.STORE_ANALYZED_PLAN_FOR_VIEW.key -> storeAnalyzed.toString) {
         withGlobalTempView("view1") {
           withTempView("view2") {
-            val db = spark.sharedState.globalTempViewManager.database
+            val db = spark.sharedState.globalTempDB
             sql("CREATE GLOBAL TEMPORARY VIEW view1 AS SELECT * FROM testData WHERE key > 1")
             sql(s"CACHE TABLE view2 AS SELECT * FROM ${db}.view1 WHERE value > 1")
             assert(spark.catalog.isCached("view2"))
@@ -1487,7 +1487,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils
       withSQLConf(SQLConf.STORE_ANALYZED_PLAN_FOR_VIEW.key -> storeAnalyzed.toString) {
         withGlobalTempView("view1") {
           withTempView("view2") {
-            val db = spark.sharedState.globalTempViewManager.database
+            val db = spark.sharedState.globalTempDB
             sql("CREATE GLOBAL TEMPORARY VIEW view1 AS SELECT * FROM testData WHERE key > 1")
             sql(s"CACHE TABLE view2 AS SELECT * FROM $db.view1 WHERE value > 1")
             assert(spark.catalog.isCached("view2"))
@@ -1517,7 +1517,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils
     Seq(true, false).foreach { storeAnalyzed =>
       withSQLConf(SQLConf.STORE_ANALYZED_PLAN_FOR_VIEW.key -> storeAnalyzed.toString) {
         withGlobalTempView("global_tv") {
-          val db = spark.sharedState.globalTempViewManager.database
+          val db = spark.sharedState.globalTempDB
           testAlterTemporaryViewAsWithCache(TableIdentifier("global_tv", Some(db)), storeAnalyzed)
         }
       }
@@ -1575,7 +1575,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils
 
   test("SPARK-34699: CREATE GLOBAL TEMP VIEW USING should uncache correctly") {
     withGlobalTempView("global_tv") {
-      val db = spark.sharedState.globalTempViewManager.database
+      val db = spark.sharedState.globalTempDB
       testCreateTemporaryViewUsingWithCache(TableIdentifier("global_tv", Some(db)))
     }
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala
index 393ecc95b66b2..5f1fa2904e341 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala
@@ -644,7 +644,7 @@ class StatisticsCollectionSuite extends StatisticsCollectionTestBase with Shared
 
   test("analyzes column statistics in cached global temporary view") {
     withGlobalTempView("gTempView") {
-      val globalTempDB = spark.sharedState.globalTempViewManager.database
+      val globalTempDB = spark.sharedState.globalTempDB
       val e1 = intercept[AnalysisException] {
         sql(s"ANALYZE TABLE $globalTempDB.gTempView COMPUTE STATISTICS FOR COLUMNS id")
       }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala
index 6e2200380d6cc..31d8dd0740e14 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala
@@ -30,7 +30,7 @@ class GlobalTempViewSuite extends QueryTest with SharedSparkSession {
 
   override protected def beforeAll(): Unit = {
     super.beforeAll()
-    globalTempDB = spark.sharedState.globalTempViewManager.database
+    globalTempDB = spark.sharedState.globalTempDB
   }
 
   private var globalTempDB: String = _
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
index 91031ef642a02..f54a4f4606061 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
@@ -91,7 +91,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
         "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`jtv1`",
         "tempObj" -> "VIEW",
         "tempObjName" -> "`temp_jtv1`"))
-    val globalTempDB = spark.sharedState.globalTempViewManager.database
+    val globalTempDB = spark.sharedState.globalTempDB
     sql("CREATE GLOBAL TEMP VIEW global_temp_jtv1 AS SELECT * FROM jt WHERE id > 0")
     checkError(
       exception = intercept[AnalysisException] {
@@ -1102,7 +1102,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
   test("local temp view refers global temp view") {
     withGlobalTempView("v1") {
       withTempView("v2") {
-        val globalTempDB = spark.sharedState.globalTempViewManager.database
+        val globalTempDB = spark.sharedState.globalTempDB
         sql("CREATE GLOBAL TEMPORARY VIEW v1 AS SELECT 1")
         sql(s"CREATE TEMPORARY VIEW v2 AS SELECT * FROM ${globalTempDB}.v1")
         checkAnswer(sql("SELECT * FROM v2"), Seq(Row(1)))
@@ -1113,7 +1113,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
   test("global temp view refers local temp view") {
     withTempView("v1") {
       withGlobalTempView("v2") {
-        val globalTempDB = spark.sharedState.globalTempViewManager.database
+        val globalTempDB = spark.sharedState.globalTempDB
         sql("CREATE TEMPORARY VIEW v1 AS SELECT 1")
         sql(s"CREATE GLOBAL TEMPORARY VIEW v2 AS SELECT * FROM v1")
         checkAnswer(sql(s"SELECT * FROM ${globalTempDB}.v2"), Seq(Row(1)))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
index f15c989fc072c..e75413b804f48 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
@@ -536,7 +536,7 @@ class LocalTempViewTestSuite extends TempViewTestSuite with SharedSparkSession {
 }
 
 class GlobalTempViewTestSuite extends TempViewTestSuite with SharedSparkSession {
-  private def db: String = spark.sharedState.globalTempViewManager.database
+  private def db: String = spark.sharedState.globalTempDB
   override protected def viewTypeString: String = "GLOBAL TEMPORARY VIEW"
   override protected def formattedViewName(viewName: String): String = {
     s"$db.$viewName"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
index 02c9d318bb46f..ef9ae47847405 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
@@ -236,7 +236,7 @@ trait AlterTableDropPartitionSuiteBase extends QueryTest with DDLCommandTestUtil
         checkCachedRelation("v1", Seq(Row(0, 0), Row(3, 3)))
       }
 
-      val v2 = s"${spark.sharedState.globalTempViewManager.database}.v2"
+      val v2 = s"${spark.sharedState.globalTempDB}.v2"
       withGlobalTempView("v2") {
         sql(s"CREATE GLOBAL TEMP VIEW v2 AS SELECT * FROM $t")
         cacheRelation(v2)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionSuiteBase.scala
index 0aaeb8d2160c3..d91085956e330 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionSuiteBase.scala
@@ -222,7 +222,7 @@ trait AlterTableRenamePartitionSuiteBase extends QueryTest with DDLCommandTestUt
         checkCachedRelation("v1", Seq(Row(0, 2), Row(1, 3)))
       }
 
-      val v2 = s"${spark.sharedState.globalTempViewManager.database}.v2"
+      val v2 = s"${spark.sharedState.globalTempDB}.v2"
       withGlobalTempView("v2") {
         sql(s"CREATE GLOBAL TEMP VIEW v2 AS SELECT * FROM $t")
         cacheRelation(v2)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index b4eeffab8d855..216472ad23db6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -2260,7 +2260,7 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     )
 
     withGlobalTempView("src") {
-      val globalTempDB = spark.sharedState.globalTempViewManager.database
+      val globalTempDB = spark.sharedState.globalTempDB
       sql("CREATE GLOBAL TEMP VIEW src AS SELECT 1 AS a, '2' AS b")
       sql(s"CREATE TABLE t4 LIKE $globalTempDB.src USING parquet")
       val table = catalog.getTableMetadata(TableIdentifier("t4"))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableSuiteBase.scala
index facbfa3dedf8c..982c568d09a79 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableSuiteBase.scala
@@ -206,7 +206,7 @@ trait TruncateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
       )
     }
 
-    val v2 = s"${spark.sharedState.globalTempViewManager.database}.v2"
+    val v2 = s"${spark.sharedState.globalTempDB}.v2"
     withGlobalTempView("v2") {
       sql(s"CREATE GLOBAL TEMP VIEW v2 AS SELECT * FROM $t")
       checkError(
@@ -245,7 +245,7 @@ trait TruncateTableSuiteBase extends QueryTest with DDLCommandTestUtils {
       checkCachedRelation("v1", Seq(Row(0, 0, 0)))
     }
 
-    val v2 = s"${spark.sharedState.globalTempViewManager.database}.v2"
+    val v2 = s"${spark.sharedState.globalTempDB}.v2"
     withGlobalTempView("v2") {
       sql(s"INSERT INTO $t PARTITION (width = 10, length = 10) SELECT 10")
       sql(s"CREATE GLOBAL TEMP VIEW v2 AS SELECT * FROM $t")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableAddPartitionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableAddPartitionSuite.scala
index 71f04159638aa..dac99c8ff7023 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableAddPartitionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableAddPartitionSuite.scala
@@ -133,7 +133,7 @@ trait AlterTableAddPartitionSuiteBase extends command.AlterTableAddPartitionSuit
       checkCachedRelation("v1", Seq(Row(0, 0), Row(0, 1), Row(0, 2)))
     }
 
-    val v2 = s"${spark.sharedState.globalTempViewManager.database}.v2"
+    val v2 = s"${spark.sharedState.globalTempDB}.v2"
     withGlobalTempView("v2") {
       sql(s"CREATE GLOBAL TEMP VIEW v2 AS SELECT * FROM $t")
       cacheRelation(v2)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
index b733666f0d84a..defa026c0e281 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
@@ -105,7 +105,7 @@ class AlterTableAddPartitionSuite
       checkCachedRelation("v1", Seq(Row(0, 0), Row(0, 1), Row(1, 2)))
     }
 
-    val v2 = s"${spark.sharedState.globalTempViewManager.database}.v2"
+    val v2 = s"${spark.sharedState.globalTempDB}.v2"
     withGlobalTempView(v2) {
       sql(s"CREATE GLOBAL TEMP VIEW v2 AS SELECT * FROM $t")
       cacheRelation(v2)
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala
index 6d250a6b302fc..507dfc2ec50eb 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala
@@ -109,7 +109,7 @@ private[hive] class SparkGetColumnsOperation(
     }
 
     // Global temporary views
-    val globalTempViewDb = catalog.globalTempViewManager.database
+    val globalTempViewDb = catalog.globalTempDatabase
     val databasePattern = Pattern.compile(CLIServiceUtils.patternToRegex(schemaName))
     if (databasePattern.matcher(globalTempViewDb).matches()) {
       catalog.globalTempViewManager.listViewNames(tablePattern).foreach { globalTempView =>
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetSchemasOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetSchemasOperation.scala
index 2381ee78314d5..db1cf201b2e92 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetSchemasOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetSchemasOperation.scala
@@ -79,7 +79,7 @@ private[hive] class SparkGetSchemasOperation(
       rowSet.addRow(Array[AnyRef](dbName, DEFAULT_HIVE_CATALOG))
     }
 
-    val globalTempViewDb = sqlContext.sessionState.catalog.globalTempViewManager.database
+    val globalTempViewDb = sqlContext.sessionState.catalog.globalTempDatabase
     val databasePattern = Pattern.compile(CLIServiceUtils.patternToRegex(schemaName))
     if (schemaName == null || schemaName.isEmpty
         || databasePattern.matcher(globalTempViewDb).matches()) {
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala
index ffd519087a9a7..9d90878050678 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala
@@ -103,7 +103,7 @@ private[hive] class SparkGetTablesOperation(
 
     // Temporary views and global temporary views
     if (tableTypes == null || tableTypes.isEmpty || tableTypes.contains(VIEW.name)) {
-      val globalTempViewDb = catalog.globalTempViewManager.database
+      val globalTempViewDb = catalog.globalTempDatabase
       val databasePattern = Pattern.compile(CLIServiceUtils.patternToRegex(schemaName))
       val tempViews = if (databasePattern.matcher(globalTempViewDb).matches()) {
         catalog.listTables(globalTempViewDb, tablePattern, includeLocalTempViews = true)
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
index d058dd1c4dab9..1b13c7c5f89ad 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
@@ -35,7 +35,7 @@ trait ThriftServerWithSparkContextSuite extends SharedThriftServer {
 
   test("SPARK-29911: Uncache cached tables when session closed") {
     val cacheManager = spark.sharedState.cacheManager
-    val globalTempDB = spark.sharedState.globalTempViewManager.database
+    val globalTempDB = spark.sharedState.globalTempDB
     withJdbcStatement() { statement =>
       statement.execute("CACHE TABLE tempTbl AS SELECT 1")
     }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSharedStateSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSharedStateSuite.scala
index e3b649f9a9f01..d84b9f7960231 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSharedStateSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSharedStateSuite.scala
@@ -76,7 +76,7 @@ class HiveSharedStateSuite extends SparkFunSuite {
     assert(client.getConf("hive.metastore.warehouse.dir", "") === qualifiedWHPath,
       "session level conf should be passed to catalog")
 
-    assert(state.globalTempViewManager.database === tmpDb)
+    assert(state.globalTempDB === tmpDb)
 
     val ss2 =
       builder.config("spark.foo", "bar2222").config(WAREHOUSE_PATH.key, invalidPath).getOrCreate()