Merge remote-tracking branch 'upstream/master' into SPARK-29527
viirya committed Oct 25, 2019
2 parents cd8fb2c + dec99d8 commit 6cb88c9
Showing 7 changed files with 43 additions and 12 deletions.
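In substance, the change being merged lets UNCACHE TABLE resolve multipart (catalog- and namespace-qualified) names the way other v2-aware commands do: the grammar rule switches from tableIdentifier to multipartIdentifier, parsing moves out of SparkSqlAstBuilder into the catalyst AstBuilder as a new UncacheTableStatement, and ResolveSessionCatalog lowers that statement to the v1 UncacheTableCommand. A minimal sketch of the syntax this touches (names below are illustrative):

    // Both forms now parse as multipart names rather than v1 table identifiers.
    spark.sql("UNCACHE TABLE ns1.ns2.tbl")
    spark.sql("UNCACHE TABLE IF EXISTS ns1.ns2.tbl")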
@@ -211,7 +211,7 @@ statement
     | REFRESH (STRING | .*?)                                          #refreshResource
     | CACHE LAZY? TABLE multipartIdentifier
         (OPTIONS options=tablePropertyList)? (AS? query)?             #cacheTable
-    | UNCACHE TABLE (IF EXISTS)? tableIdentifier                      #uncacheTable
+    | UNCACHE TABLE (IF EXISTS)? multipartIdentifier                  #uncacheTable
     | CLEAR CACHE                                                     #clearCache
     | LOAD DATA LOCAL? INPATH path=STRING OVERWRITE? INTO TABLE
         tableIdentifier partitionSpec?                                #loadData
@@ -2828,6 +2828,13 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
     CacheTableStatement(tableName, query, ctx.LAZY != null, options)
   }
 
+  /**
+   * Create an [[UncacheTableStatement]] logical plan.
+   */
+  override def visitUncacheTable(ctx: UncacheTableContext): LogicalPlan = withOrigin(ctx) {
+    UncacheTableStatement(visitMultipartIdentifier(ctx.multipartIdentifier), ctx.EXISTS != null)
+  }
+
   /**
    * Create a [[TruncateTableStatement]] command.
    *
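One idiom worth noting in the new visitor: ANTLR exposes optional tokens as nullable accessors on the generated context, so ctx.EXISTS != null is the standard presence check for the optional IF EXISTS clause. A sketch of the mapping (assuming the generated UncacheTableContext):

    // Optional tokens are null when absent from the parsed statement:
    //   UNCACHE TABLE t            =>  ctx.EXISTS == null  =>  ifExists = false
    //   UNCACHE TABLE IF EXISTS t  =>  ctx.EXISTS != null  =>  ifExists = true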
@@ -344,6 +344,13 @@ case class CacheTableStatement(
     isLazy: Boolean,
     options: Map[String, String]) extends ParsedStatement
 
+/**
+ * An UNCACHE TABLE statement, as parsed from SQL
+ */
+case class UncacheTableStatement(
+    tableName: Seq[String],
+    ifExists: Boolean) extends ParsedStatement
+
 /**
  * A TRUNCATE TABLE statement, as parsed from SQL
  */
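Together with the visitor above, this gives a two-step flow: parsing produces a catalog-agnostic statement, and resolution (in ResolveSessionCatalog, further down) lowers it to a runnable command. A hedged sketch of the hand-off, with illustrative names; the TableIdentifier conversion shown is my reading of asTableIdentifier:

    // Step 1, parser: keep the raw name parts.
    //   "UNCACHE TABLE a.b"  ->  UncacheTableStatement(Seq("a", "b"), ifExists = false)
    // Step 2, resolver: convert to the v1 command when the name fits the session catalog.
    //   UncacheTableStatement(Seq("a", "b"), false)
    //     ->  UncacheTableCommand(TableIdentifier("b", Some("a")), false)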
@@ -1039,7 +1039,7 @@ class DDLParserSuite extends AnalysisTest
"missing 'COLUMNS' at '<EOF>'")
}

test("MSCK REPAIR table") {
test("MSCK REPAIR TABLE") {
comparePlans(
parsePlan("MSCK REPAIR TABLE a.b.c"),
RepairTableStatement(Seq("a", "b", "c")))
@@ -1051,7 +1051,7 @@
       ShowCreateTableStatement(Seq("a", "b", "c")))
   }
 
-  test("CACHE table") {
+  test("CACHE TABLE") {
     comparePlans(
       parsePlan("CACHE TABLE a.b.c"),
       CacheTableStatement(Seq("a", "b", "c"), None, false, Map.empty))
@@ -1068,6 +1068,16 @@
"It is not allowed to add catalog/namespace prefix a.b")
}

test("UNCACHE TABLE") {
comparePlans(
parsePlan("UNCACHE TABLE a.b.c"),
UncacheTableStatement(Seq("a", "b", "c"), ifExists = false))

comparePlans(
parsePlan("UNCACHE TABLE IF EXISTS a.b.c"),
UncacheTableStatement(Seq("a", "b", "c"), ifExists = true))
}

test("TRUNCATE table") {
comparePlans(
parsePlan("TRUNCATE TABLE a.b.c"),
@@ -1104,7 +1114,7 @@
     comparePlans(parsed5, expected5)
   }
 
-  test("REFRESH TABLE table") {
+  test("REFRESH TABLE") {
     comparePlans(
       parsePlan("REFRESH TABLE a.b.c"),
       RefreshTableStatement(Seq("a", "b", "c")))
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogPlugin, LookupCatalog, TableChange, V1Table}
 import org.apache.spark.sql.connector.expressions.Transform
-import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableRecoverPartitionsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, AnalyzeColumnCommand, AnalyzePartitionCommand, AnalyzeTableCommand, CacheTableCommand, CreateDatabaseCommand, DescribeColumnCommand, DescribeTableCommand, DropTableCommand, ShowCreateTableCommand, ShowPartitionsCommand, ShowTablesCommand, TruncateTableCommand}
+import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableRecoverPartitionsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, AnalyzeColumnCommand, AnalyzePartitionCommand, AnalyzeTableCommand, CacheTableCommand, CreateDatabaseCommand, DescribeColumnCommand, DescribeTableCommand, DropTableCommand, ShowCreateTableCommand, ShowPartitionsCommand, ShowTablesCommand, TruncateTableCommand, UncacheTableCommand}
 import org.apache.spark.sql.execution.datasources.{CreateTable, DataSource, RefreshTable}
 import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2
 import org.apache.spark.sql.internal.SQLConf
@@ -307,6 +307,10 @@ class ResolveSessionCatalog(
       val v1TableName = parseV1Table(tableName, "CACHE TABLE")
       CacheTableCommand(v1TableName.asTableIdentifier, plan, isLazy, options)
 
+    case UncacheTableStatement(tableName, ifExists) =>
+      val v1TableName = parseV1Table(tableName, "UNCACHE TABLE")
+      UncacheTableCommand(v1TableName.asTableIdentifier, ifExists)
+
     case TruncateTableStatement(tableName, partitionSpec) =>
       val v1TableName = parseV1Table(tableName, "TRUNCATE TABLE")
       TruncateTableCommand(
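Note what parseV1Table implies here: names carrying a catalog/namespace prefix are rejected during resolution (the DDLParserSuite error above shows the message shape for CACHE TABLE), so only session-catalog tables reach UncacheTableCommand, and the v2-catalog test at the end expects an AnalysisException. A small sketch, names illustrative:

    // Session-catalog name: lowers to the v1 command and uncaches the table.
    spark.sql("UNCACHE TABLE tbl")
    // v2 catalog name: fails analysis, as DataSourceV2SQLSuite asserts below.
    intercept[AnalysisException] { spark.sql("UNCACHE TABLE testcat.ns1.ns2.tbl") }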
@@ -157,13 +157,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
     unquotedPath
   }
 
-  /**
-   * Create an [[UncacheTableCommand]] logical plan.
-   */
-  override def visitUncacheTable(ctx: UncacheTableContext): LogicalPlan = withOrigin(ctx) {
-    UncacheTableCommand(visitTableIdentifier(ctx.tableIdentifier), ctx.EXISTS != null)
-  }
-
   /**
    * Create a [[ClearCacheCommand]] logical plan.
    */
@@ -1290,6 +1290,16 @@ class DataSourceV2SQLSuite
     }
   }
 
+  test("UNCACHE TABLE") {
+    val t = "testcat.ns1.ns2.tbl"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id bigint, data string) USING foo")
+
+      testV1Command("UNCACHE TABLE", t)
+      testV1Command("UNCACHE TABLE", s"IF EXISTS $t")
+    }
+  }
+
   private def testV1Command(sqlCommand: String, sqlParams: String): Unit = {
     val e = intercept[AnalysisException] {
       sql(s"$sqlCommand $sqlParams")
