diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 3e90bfc6da3cd..6e9611f375038 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -4688,7 +4688,7 @@
"SHOW CREATE TABLE doesn't support transactional Hive table. Please use `SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL instead."
]
},
- "WITH_UNNECESSARY_SERDE_PARAMETER" : {
+ "ON_SPARK_DATA_SOURCE_TABLE_WITH_AS_SERDE" : {
"message" : [
"<table> is a Spark data source table. Use `SHOW CREATE TABLE` without `AS SERDE` instead."
]
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 1afcd9877f4a3..05c475f9d608d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -2918,7 +2918,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
def showCreateTableAsSerdeNotAllowedOnSparkDataSourceTableError(
table: TableIdentifier): Throwable = {
new AnalysisException(
- errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNNECESSARY_SERDE_PARAMETER",
+ errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_SPARK_DATA_SOURCE_TABLE_WITH_AS_SERDE",
messageParameters = Map("table" -> table.toString))
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
index f2cfdbf74dbfc..e44a90288578d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala
@@ -162,7 +162,7 @@ trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
exception = intercept[AnalysisException] {
getShowCreateDDL(t, true)
},
- errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNNECESSARY_SERDE_PARAMETER",
+ errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_SPARK_DATA_SOURCE_TABLE_WITH_AS_SERDE",
sqlState = "0A000",
parameters = Map("table" -> "`spark_catalog`.`ns1`.`tbl`")
)