From f5de7deb2b5638c7f2b13b5e2d133ccad2c78f97 Mon Sep 17 00:00:00 2001 From: Michael Armbrust Date: Wed, 6 May 2015 23:42:14 +0000 Subject: [PATCH] cleanup --- .../hive/thriftserver/SparkSQLCLIDriver.scala | 3 --- .../HiveThriftServer2Suites.scala | 2 +- .../execution/HiveCompatibilitySuite.scala | 13 ++++++----- .../apache/spark/sql/hive/HiveContext.scala | 4 +--- .../sql/hive/client/ClientInterface.scala | 22 +++++++++---------- .../spark/sql/hive/client/ClientWrapper.scala | 2 +- .../hive/client/IsolatedClientLoader.scala | 4 ++-- .../spark/sql/hive/client/VersionsSuite.scala | 8 +++---- 8 files changed, 27 insertions(+), 31 deletions(-) diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala index 327420920cdf8..7c4d50dd56cb0 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala @@ -151,9 +151,6 @@ private[hive] object SparkSQLCLIDriver { case e: UnsupportedEncodingException => System.exit(3) } - // TODO: SET commands seem to be using the wrong session? - sessionState.getConf.set("spark.sql.hive.version", HiveShim.version) - if (sessionState.database != null) { SparkSQLEnv.hiveContext.runSqlHive(s"USE ${sessionState.database}") } diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala index 79c1451e3bfdc..1fadea97fd07f 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala @@ -539,7 +539,7 @@ abstract class HiveThriftServer2Test extends FunSuite with BeforeAndAfterAll wit diagnosisBuffer.clear() // Retries up to 3 times with different port numbers if the server fails to start - Seq.empty.foldLeft(Try(startThriftServer(listeningPort, 0))) { case (started, attempt) => + (1 to 3).foldLeft(Try(startThriftServer(listeningPort, 0))) { case (started, attempt) => started.orElse { listeningPort += 1 stopThriftServer() diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala index 2e79127739b39..b6245a57074c8 100644 --- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala +++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala @@ -242,14 +242,15 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter { // https://issues.apache.org/jira/browse/HIVE-7673 (in Hive 0.14 and trunk). "input46", - "combine1", // BROKEN - - "part_inherit_tbl_props", // BROKEN - "part_inherit_tbl_props_with_star", // BROKEN + // These tests were broken by the hive client isolation PR. + "part_inherit_tbl_props", + "part_inherit_tbl_props_with_star", - "nullformatCTAS", // NEED TO FINISH CTAS parser + "nullformatCTAS", // SPARK-7411: need to finish CTAS parser - "load_dyn_part14.*" // These work along but fail when run with other tests... 
+ // The isolated classloader seemed to make some of our test reset mechanisms less robust. + "combine1", // This test changes compression settings in a way that breaks all subsequent tests. + "load_dyn_part14.*" // These work alone but fail when run with other tests... ) ++ HiveShim.compatibilityBlackList /** diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala index 455a2ecf7f0ea..d433f9de0a994 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala @@ -109,7 +109,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) { /** * The location of the jars that should be used to instantiate the HiveMetastoreClient. This - * property can be one of three option: + * property can be one of three options: * - a colon-separated list of jar files or directories for hive and hadoop. * - builtin - attempt to discover the jars that were used to load Spark SQL and use those. This * option is only valid when using the execution version of Hive. @@ -362,8 +362,6 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) { override def dialect: String = getConf(SQLConf.DIALECT, "hiveql") } - protected[hive] def localSession = executionHive.state - /** * SQLConf and HiveConf contracts: * diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala index 3ae9e1bf57ca8..0a1d761a52f88 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala @@ -22,30 +22,30 @@ import java.util.{Map => JMap} import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException} -case class HiveDatabase( +private[hive] case class HiveDatabase( name: String, location: String) -abstract class TableType { val name: String } -case object ExternalTable extends TableType { override val name = "EXTERNAL_TABLE" } -case object IndexTable extends TableType { override val name = "INDEX_TABLE" } -case object ManagedTable extends TableType { override val name = "MANAGED_TABLE" } -case object VirtualView extends TableType { override val name = "VIRTUAL_VIEW" } +private[hive] abstract class TableType { val name: String } +private[hive] case object ExternalTable extends TableType { override val name = "EXTERNAL_TABLE" } +private[hive] case object IndexTable extends TableType { override val name = "INDEX_TABLE" } +private[hive] case object ManagedTable extends TableType { override val name = "MANAGED_TABLE" } +private[hive] case object VirtualView extends TableType { override val name = "VIRTUAL_VIEW" } // TODO: Use this for Tables and Partitions -case class HiveStorageDescriptor( +private[hive] case class HiveStorageDescriptor( location: String, inputFormat: String, outputFormat: String, serde: String, serdeProperties: Map[String, String]) -case class HivePartition( +private[hive] case class HivePartition( values: Seq[String], storage: HiveStorageDescriptor) -case class HiveColumn(name: String, hiveType: String, comment: String) -case class HiveTable( +private[hive] case class HiveColumn(name: String, hiveType: String, comment: String) +private[hive] case class HiveTable( specifiedDatabase: Option[String], name: String, schema: Seq[HiveColumn], @@ -82,7 +82,7 @@ case class HiveTable( * 
internal and external classloaders for a given version of Hive and thus must expose only
 * shared classes.
 */
-trait ClientInterface {
+private[hive] trait ClientInterface {
   /**
    * Runs a HiveQL command using Hive, returning the results as a list of strings. Each row will
    * result in one string.
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
index 42c74aae0f1f5..6bca9d0179fe3 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
@@ -55,7 +55,7 @@ import org.apache.spark.sql.execution.QueryExecutionException
  * @param config a collection of configuration options that will be added to the hive conf before
  * opening the hive client.
  */
-class ClientWrapper(
+private[hive] class ClientWrapper(
     version: HiveVersion,
     config: Map[String, String])
   extends ClientInterface
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
index 7e212e9fafae6..7f94c93ba49c1 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
@@ -33,7 +33,7 @@ import org.apache.spark.sql.catalyst.util.quietly
 import org.apache.spark.sql.hive.HiveContext
 
 /** Factory for `IsolatedClientLoader` with specific versions of hive. */
-object IsolatedClientLoader {
+private[hive] object IsolatedClientLoader {
   /**
    * Creates isolated Hive client loaders by downloading the requested version from maven.
    */
@@ -100,7 +100,7 @@ object IsolatedClientLoader {
  * @param baseClassLoader The spark classloader that is used to load shared classes.
  *
  */
-class IsolatedClientLoader(
+private[hive] class IsolatedClientLoader(
     val version: HiveVersion,
     val execJars: Seq[URL] = Seq.empty,
     val config: Map[String, String] = Map.empty,
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index d1f1a6e48dc75..321dc8d7322b8 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -23,10 +23,10 @@ import org.apache.spark.util.Utils
 import org.scalatest.FunSuite
 
 /**
- * A simple set of tests that call the methods of a hive ClientInterface, loading different version of hive
- * from maven central. These tests are simple in that they are mostly just testing to make sure that
- * reflective calls are not throwing NoSuchMethod error, but the actually functionallity is not fully
- * tested.
+ * A simple set of tests that call the methods of a hive ClientInterface, loading different
+ * versions of hive from maven central. These tests are simple in that they are mostly just
+ * testing to make sure that reflective calls are not throwing NoSuchMethod errors, but the actual
+ * functionality is not fully tested.
  */
 class VersionsSuite extends FunSuite with Logging {
   private def buildConf() = {
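
Reviewer note: the HiveThriftServer2Suites fix above replaces `Seq.empty.foldLeft` (whose retry
body never ran, since folding over an empty sequence just returns the initial value) with
`(1 to 3).foldLeft`, so the server start is now actually retried on new ports. A minimal,
self-contained sketch of that retry-via-foldLeft pattern follows; `startServer`, the port values,
and the failure condition are hypothetical stand-ins for illustration, not part of this patch:

    import scala.util.Try

    object RetrySketch {
      // Stand-in for startThriftServer: pretend ports below 5000 are already taken.
      def startServer(port: Int): Unit =
        if (port < 5000) throw new RuntimeException(s"port $port unavailable")
        else println(s"server started on port $port")

      def main(args: Array[String]): Unit = {
        var port = 4998
        // Seed the fold with the first attempt, then retry up to 3 times, bumping the port
        // each time. Try#orElse takes its argument by name and returns `this` on Success,
        // so no further attempts run once one succeeds.
        val started = (1 to 3).foldLeft(Try(startServer(port))) { case (attempt, _) =>
          attempt.orElse {
            port += 1
            Try(startServer(port))
          }
        }
        started.get // rethrows the last failure if every attempt failed
      }
    }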