From 82d58baa98235f912141b2b40cdcd3bf0bdbe744 Mon Sep 17 00:00:00 2001
From: Terry Kim
Date: Tue, 23 Feb 2021 20:36:54 -0800
Subject: [PATCH] Address PR comments

---
 .../spark/sql/catalyst/analysis/Analyzer.scala      |  8 ++++----
 .../plans/logical/basicLogicalOperators.scala       |  2 +-
 .../sql/catalyst/analysis/AnalysisTest.scala        | 17 ++++-------------
 3 files changed, 9 insertions(+), 18 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 214958981760e..2cec2cc5c3706 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -869,7 +869,7 @@ class Analyzer(override val catalogManager: CatalogManager)
       }
     }
 
-    private def getTempViewRawPlan(plan: LogicalPlan): LogicalPlan = {
+    private def unwrapRelationPlan(plan: LogicalPlan): LogicalPlan = {
       EliminateSubqueryAliases(plan) match {
         case v: View if v.isDataFrameTempView => v.child
         case other => other
@@ -900,7 +900,7 @@ class Analyzer(override val catalogManager: CatalogManager)
       case write: V2WriteCommand =>
         write.table match {
           case UnresolvedRelation(ident, _, false) =>
-            lookupTempView(ident, performCheck = true).map(getTempViewRawPlan).map {
+            lookupTempView(ident, performCheck = true).map(unwrapRelationPlan).map {
               case r: DataSourceV2Relation => write.withNewTable(r)
               case _ => throw QueryCompilationErrors.writeIntoTempViewNotAllowedError(ident.quoted)
             }.getOrElse(write)
@@ -1154,8 +1154,8 @@ class Analyzer(override val catalogManager: CatalogManager)
 
         // Inserting into a file-based temporary view is allowed.
         // (e.g., spark.read.parquet("path").createOrReplaceTempView("t").
-        // Thus, we need to look at the raw plan of a temporary view.
-        getTempViewRawPlan(relation) match {
+        // Thus, we need to look at the raw plan if `relation` is a temporary view.
+        unwrapRelationPlan(relation) match {
          case v: View =>
            throw QueryCompilationErrors.insertIntoViewNotAllowedError(v.desc.identifier, table)
          case other => i.copy(table = other)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
index 1c5f5fc2c60ff..3f20d8f67b44d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
@@ -445,7 +445,7 @@ case class InsertIntoDir(
 
 /**
  * A container for holding the view description(CatalogTable) and info whether the view is temporary
- * or not. If the view description is available, the child should be a logical plan parsed from the
+ * or not. If it's a SQL (temp) view, the child should be a logical plan parsed from the
  * `CatalogTable.viewText`. Otherwise, the view is a temporary one created from a dataframe and the
  * view description should contain a `VIEW_CREATED_FROM_DATAFRAME` property; in this case, the child
  * must be already resolved.
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
index 1546976b5640f..7248424a68ad9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
@@ -59,25 +59,16 @@ trait AnalysisTest extends PlanTest {
     }
   }
 
-  protected def checkAnalysisWithTransform(
-      inputPlan: LogicalPlan,
-      expectedPlan: LogicalPlan,
-      caseSensitive: Boolean = true)(transform: LogicalPlan => LogicalPlan): Unit = {
-    withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
-      val analyzer = getAnalyzer
-      val actualPlan = analyzer.executeAndCheck(inputPlan, new QueryPlanningTracker)
-      comparePlans(transform(actualPlan), expectedPlan)
-    }
-  }
-
   protected def checkAnalysisWithoutViewWrapper(
       inputPlan: LogicalPlan,
       expectedPlan: LogicalPlan,
       caseSensitive: Boolean = true): Unit = {
-    checkAnalysisWithTransform(inputPlan, expectedPlan, caseSensitive) { plan =>
-      plan transformUp {
+    withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
+      val actualPlan = getAnalyzer.executeAndCheck(inputPlan, new QueryPlanningTracker)
+      val transformed = actualPlan transformUp {
         case v: View if v.isDataFrameTempView => v.child
       }
+      comparePlans(transformed, expectedPlan)
     }
   }
 
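
---

Note for reviewers (not part of the patch): a minimal sketch, assuming a local SparkSession, of the two kinds of temporary views that the renamed `unwrapRelationPlan` helper distinguishes. The parquet path, object name, and view names below are placeholders.

import org.apache.spark.sql.SparkSession

object TempViewKindsSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("temp-view-sketch").getOrCreate()

    // DataFrame-based temp view: its View node carries the VIEW_CREATED_FROM_DATAFRAME
    // property and wraps an already-resolved child plan, so unwrapRelationPlan strips the
    // View wrapper. Inserting into such a file-based view is allowed.
    spark.read.parquet("/path/to/data.parquet") // placeholder path
      .createOrReplaceTempView("df_temp_view")

    // SQL temp view: its child plan is parsed from the stored view text, and an INSERT
    // into it is rejected with insertIntoViewNotAllowedError.
    spark.sql("CREATE TEMPORARY VIEW sql_temp_view AS SELECT 1 AS id")

    spark.stop()
  }
}

This also motivates the rename: the helper no longer unwraps only "temp view raw plans" conceptually, but whatever relation plan sits behind a DataFrame-created View node.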