
Commit

Patch up.
rxin committed Jan 21, 2015
1 parent 2c4fc5a commit 526c3b0
Showing 2 changed files with 50 additions and 50 deletions.
@@ -60,61 +60,61 @@ case class SimpleDDLScan(from: Int, to: Int)(@transient val sqlContext: SQLContext
map(e => Row(s"people$e",e*2))
}

-class DDLTestSuit extends DataSourceTest {
+class DDLTestSuite extends DataSourceTest {
import caseInsensisitiveContext._

before {
sql(
"""
|CREATE TEMPORARY TABLE ddlPeople
|USING org.apache.spark.sql.sources.DDLScanSource
|OPTIONS (
| From '1',
| To '10'
|)
""".stripMargin)
}

sqlTest(
"describe ddlPeople",
Seq(
Row("intType", "int", null),
Row("stringType", "string", null),
Row("dateType", "date", null),
Row("timestampType", "timestamp", null),
Row("doubleType", "double", null),
Row("bigintType", "bigint", null),
Row("tinyintType", "tinyint", null),
Row("decimalType", "decimal(10,0)", null),
Row("fixedDecimalType", "decimal(5,1)", null),
Row("binaryType", "binary", null),
Row("booleanType", "boolean", null),
Row("smallIntType", "smallint", null),
Row("floatType", "float", null),
Row("mapType", "map<string,string>", null),
Row("arrayType", "array<string>", null),
Row("structType", "struct<f1:string,f2:int>", null)
))
"describe ddlPeople",
Seq(
Row("intType", "int", null),
Row("stringType", "string", null),
Row("dateType", "date", null),
Row("timestampType", "timestamp", null),
Row("doubleType", "double", null),
Row("bigintType", "bigint", null),
Row("tinyintType", "tinyint", null),
Row("decimalType", "decimal(10,0)", null),
Row("fixedDecimalType", "decimal(5,1)", null),
Row("binaryType", "binary", null),
Row("booleanType", "boolean", null),
Row("smallIntType", "smallint", null),
Row("floatType", "float", null),
Row("mapType", "map<string,string>", null),
Row("arrayType", "array<string>", null),
Row("structType", "struct<f1:string,f2:int>", null)
))

sqlTest(
"describe extended ddlPeople",
Seq(
Row("intType", "int", null),
Row("stringType", "string", null),
Row("dateType", "date", null),
Row("timestampType", "timestamp", null),
Row("doubleType", "double", null),
Row("bigintType", "bigint", null),
Row("tinyintType", "tinyint", null),
Row("decimalType", "decimal(10,0)", null),
Row("fixedDecimalType", "decimal(5,1)", null),
Row("binaryType", "binary", null),
Row("booleanType", "boolean", null),
Row("smallIntType", "smallint", null),
Row("floatType", "float", null),
Row("mapType", "map<string,string>", null),
Row("arrayType", "array<string>", null),
Row("structType", "struct<f1:string,f2:int>", null)
// Row("# extended", null, null)
))
"describe extended ddlPeople",
Seq(
Row("intType", "int", null),
Row("stringType", "string", null),
Row("dateType", "date", null),
Row("timestampType", "timestamp", null),
Row("doubleType", "double", null),
Row("bigintType", "bigint", null),
Row("tinyintType", "tinyint", null),
Row("decimalType", "decimal(10,0)", null),
Row("fixedDecimalType", "decimal(5,1)", null),
Row("binaryType", "binary", null),
Row("booleanType", "boolean", null),
Row("smallIntType", "smallint", null),
Row("floatType", "float", null),
Row("mapType", "map<string,string>", null),
Row("arrayType", "array<string>", null),
Row("structType", "struct<f1:string,f2:int>", null)
// Row("# extended", null, null)
))
}
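
For context on how the CREATE TEMPORARY TABLE ... USING org.apache.spark.sql.sources.DDLScanSource statement in the test above resolves, the sketch below shows the general shape of such a data source. It is a minimal illustration, not the code in this repository: it assumes the Spark 1.3-era external data sources API (RelationProvider plus a TableScan trait mixed into BaseRelation), and the option handling and schema fields are reduced examples.

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, RelationProvider, TableScan}
import org.apache.spark.sql.types._

// Illustrative provider: the class named in the USING clause. Spark calls
// createRelation with the key/value pairs from the OPTIONS clause (From '1', To '10').
class DDLScanSource extends RelationProvider {
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    // Lower-case option keys are assumed here; the exact casing handed to
    // the provider is not visible in this diff.
    SimpleDDLScan(parameters("from").toInt, parameters("to").toInt)(sqlContext)
  }
}

// Illustrative relation: declares a schema (what DESCRIBE reports) and
// produces one Row per value in [from, to], matching the hunk context
// line map(e => Row(s"people$e", e*2)) above.
case class SimpleDDLScan(from: Int, to: Int)(@transient val sqlContext: SQLContext)
  extends BaseRelation with TableScan {

  override def schema: StructType =
    StructType(Seq(
      StructField("stringType", StringType),
      StructField("intType", IntegerType)))

  override def buildScan(): RDD[Row] =
    sqlContext.sparkContext.parallelize(from to to).map(e => Row(s"people$e", e * 2))
}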
@@ -74,7 +74,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
val ddlPlan = ddlParser(sqlText)
val basicPlan = try {
HiveQl.parseSql(sqlText)
-}catch {
+} catch {
case e: Exception if ddlPlan.nonEmpty => ddlPlan.get
case e: Throwable => throw e
}

0 comments on commit 526c3b0
