Commit 4e61c16
minor change
zhzhan authored and liancheng committed May 16, 2015
1 parent 305418c commit 4e61c16
Showing 3 changed files with 11 additions and 7 deletions.
@@ -91,7 +91,8 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
     val tempDir = getTempFilePath("orcTest").getCanonicalPath
     val range = (0 to 255)
     val data = sparkContext.parallelize(range)
-      .map(x => AllDataTypes(s"$x", x, x.toLong, x.toFloat, x.toDouble, x.toShort, x.toByte, x % 2 == 0))
+      .map(x =>
+        AllDataTypes(s"$x", x, x.toLong, x.toFloat, x.toDouble, x.toShort, x.toByte, x % 2 == 0))
     data.toDF().saveAsOrcFile(tempDir)
     checkAnswer(
       TestHive.orcFile(tempDir),
@@ -101,7 +102,8 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
 
   test("read/write binary data") {
     val tempDir = getTempFilePath("orcTest").getCanonicalPath
-    sparkContext.parallelize(BinaryData("test".getBytes("utf8")) :: Nil).toDF().saveAsOrcFile(tempDir)
+    sparkContext.parallelize(BinaryData("test".getBytes("utf8")) :: Nil)
+      .toDF().saveAsOrcFile(tempDir)
     TestHive.orcFile(tempDir)
       .map(r => new String(r(0).asInstanceOf[Array[Byte]], "utf8"))
       .collect().toSeq == Seq("test")
@@ -136,7 +138,8 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
     rdd.foreach {
       // '===' does not like string comparison?
       row: Row => {
-        assert(row.getString(1).equals(s"val_$counter"), s"row $counter value ${row.getString(1)} does not match val_$counter")
+        assert(row.getString(1).equals(s"val_$counter"),
+          s"row $counter value ${row.getString(1)} does not match val_$counter")
         counter = counter + 1
       }
     }
@@ -173,7 +176,7 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
 
   // We only support zlib in hive0.12.0 now
   test("Default Compression options for writing to an Orcfile") {
-    //TODO: support other compress codec
+    // TODO: support other compress codec
     var tempDir = getTempFilePath("orcTest").getCanonicalPath
     val rdd = sparkContext.parallelize((1 to 100))
      .map(i => TestRDDEntry(i, s"val_$i"))
@@ -184,7 +187,7 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
   }
 
   // Following codec is supported in hive-0.13.1, ignore it now
-  ignore("Other Compression options for writing to an Orcfile only supported in hive 0.13.1 and above") {
+  ignore("Other Compression options for writing to an Orcfile - 0.13.1 and above") {
     TestHive.sparkContext.hadoopConfiguration.set(orcDefaultCompressVar, "SNAPPY")
     var tempDir = getTempFilePath("orcTest").getCanonicalPath
     val rdd = sparkContext.parallelize((1 to 100))
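
For reference, every test touched above exercises the same ORC round trip: write a DataFrame with saveAsOrcFile, read it back with TestHive.orcFile, and compare. A minimal sketch of that pattern, assuming the experimental saveAsOrcFile/orcFile helpers from this branch and using only names that appear in this diff:

    // Sketch of the write/read round trip these tests exercise, assuming
    // the experimental saveAsOrcFile/orcFile helpers from this branch.
    case class TestRDDEntry(key: Int, value: String)

    val tempDir = getTempFilePath("orcTest").getCanonicalPath
    val data = sparkContext.parallelize(1 to 100)
      .map(i => TestRDDEntry(i, s"val_$i"))

    data.toDF().saveAsOrcFile(tempDir)   // write ORC to a temp directory

    checkAnswer(                         // read back and compare row sets
      TestHive.orcFile(tempDir),
      data.toDF().collect().toSeq)
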
@@ -530,4 +530,4 @@ class FSBasedOrcRelationSuite extends OrcRelationTest {
           "dataSchema" -> dataSchemaWithPartition.json)))
   }
 }
-}
\ No newline at end of file
+}
@@ -175,7 +175,8 @@ abstract class OrcSuite extends QueryTest with BeforeAndAfterAll {
   }
 
   test("overwrite insert") {
-    sql("insert overwrite table normal_orc_as_source select * from orc_temp_table where intField > 5")
+    sql("insert overwrite table normal_orc_as_source select * " +
+      "from orc_temp_table where intField > 5")
     checkAnswer(
       sql("select * from normal_orc_as_source"),
       Row(6, "part-6") ::
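
As an aside, the wrap above uses plain string concatenation; a stripMargin block is a hypothetical alternative (not what this commit does) that reads the same to Hive, since HiveQL tolerates newlines inside a statement:

    // Hypothetical alternative wrapping for the same statement.
    sql(
      """insert overwrite table normal_orc_as_source
        |select * from orc_temp_table where intField > 5
        |""".stripMargin)
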