[SPARK-24669][SQL] Invalidate tables in case of DROP DATABASE CASCADE
## What changes were proposed in this pull request?
Before dropping a database, refresh the tables in that database so that all cached entries associated with those tables are invalidated. We already follow the same approach when dropping a single table.

A unit test is added.
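
For context, a minimal sketch of the scenario this change addresses (the `foo` database and the two paths are illustrative; the new `DDLSuite` test below exercises the same flow with CSV files). Without the invalidation, the cached entry for `foo.first` could survive `DROP DATABASE ... CASCADE` and serve stale data after the table is recreated at a different location.

```scala
// Sketch only: assumes an active SparkSession `spark` and CSV data at two
// hypothetical paths path1 and path2 (these names are not from the patch).
val (path1, path2) = ("/tmp/first.csv", "/tmp/second.csv")

spark.sql("CREATE DATABASE foo")
spark.sql(s"CREATE TABLE foo.first (id STRING) USING csv OPTIONS (path='$path1')")
spark.sql("SELECT * FROM foo.first").collect()  // caches the relation for foo.first

spark.sql("DROP DATABASE foo CASCADE")          // with this change, foo's tables are invalidated here
spark.sql("CREATE DATABASE foo")
spark.sql(s"CREATE TABLE foo.first (id STRING) USING csv OPTIONS (path='$path2')")
spark.sql("SELECT * FROM foo.first").collect()  // reads data at path2, not a stale cached relation
```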

Closes apache#23905 from Udbhav30/SPARK-24669.

Authored-by: Udbhav30 <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
(cherry picked from commit 9bddf71)
Signed-off-by: Dongjoon Hyun <[email protected]>
Udbhav30 authored and kai-chi committed Jul 23, 2019
1 parent f34e0d6 commit 960e221
Showing 2 changed files with 42 additions and 1 deletion.
SessionCatalog.scala
@@ -218,6 +218,11 @@ class SessionCatalog(
    if (dbName == DEFAULT_DATABASE) {
      throw new AnalysisException(s"Can not drop default database")
    }
+    if (cascade && databaseExists(dbName)) {
+      listTables(dbName).foreach { t =>
+        invalidateCachedTable(QualifiedTableName(dbName, t.table))
+      }
+    }
    externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade)
  }

DDLSuite.scala
@@ -17,7 +17,7 @@

package org.apache.spark.sql.execution.command

-import java.io.File
+import java.io.{File, PrintWriter}
import java.net.URI
import java.util.Locale

@@ -2715,4 +2715,40 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
      }
    }
  }
+
+  test("Refresh table before drop database cascade") {
+    withTempDir { tempDir =>
+      val file1 = new File(tempDir + "/first.csv")
+      val writer1 = new PrintWriter(file1)
+      writer1.write("first")
+      writer1.close()
+
+      val file2 = new File(tempDir + "/second.csv")
+      val writer2 = new PrintWriter(file2)
+      writer2.write("second")
+      writer2.close()
+
+      withDatabase("foo") {
+        withTable("foo.first") {
+          sql("CREATE DATABASE foo")
+          sql(
+            s"""CREATE TABLE foo.first (id STRING)
+               |USING csv OPTIONS (path='${file1.toURI}')
+             """.stripMargin)
+          sql("SELECT * FROM foo.first")
+          checkAnswer(spark.table("foo.first"), Row("first"))
+
+          // Drop the database, then recreate the same table with a different path
+          sql("DROP DATABASE foo CASCADE")
+          sql("CREATE DATABASE foo")
+          sql(
+            s"""CREATE TABLE foo.first (id STRING)
+               |USING csv OPTIONS (path='${file2.toURI}')
+             """.stripMargin)
+          sql("SELECT * FROM foo.first")
+          checkAnswer(spark.table("foo.first"), Row("second"))
+        }
+      }
+    }
+  }
 }
