
Commit be87add
Migrate tests
MaxGekk committed Feb 15, 2024
1 parent 7b40d7b commit be87add
Showing 5 changed files with 63 additions and 42 deletions.
IntervalUtils.scala
@@ -22,7 +22,7 @@ import java.util.concurrent.TimeUnit
 
 import scala.util.control.NonFatal
 
-import org.apache.spark.SparkIllegalArgumentException
+import org.apache.spark.{SparkIllegalArgumentException, SparkThrowable}
 import org.apache.spark.sql.catalyst.expressions.Literal
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
@@ -202,6 +202,7 @@ object IntervalUtils extends SparkIntervalUtils {
     try {
       f
     } catch {
+      case e: SparkThrowable => throw e
       case NonFatal(e) =>
         throw new SparkIllegalArgumentException(
           errorClass = "_LEGACY_ERROR_TEMP_3213",
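
The rethrow guard added here is small but load-bearing: an exception that already carries a Spark error class must escape before the NonFatal case re-wraps it, or the class would be lost. A minimal sketch of the pattern, with hypothetical names (safeParse, parseRaw, input) standing in for the enclosing method, and the messageParameters name assumed from the SparkIllegalArgumentException constructor:

    import scala.util.control.NonFatal
    import org.apache.spark.{SparkIllegalArgumentException, SparkThrowable}

    // Hypothetical wrapper mirroring the catch block above: a SparkThrowable
    // passes through with its error class intact; anything else is wrapped
    // into the legacy error class with the raw message as a parameter.
    def safeParse[T](input: String)(parseRaw: => T): T = {
      try {
        parseRaw
      } catch {
        case e: SparkThrowable => throw e
        case NonFatal(e) =>
          throw new SparkIllegalArgumentException(
            errorClass = "_LEGACY_ERROR_TEMP_3213",
            messageParameters = Map("interval" -> input, "msg" -> e.getMessage))
      }
    }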
CSVExprUtilsSuite.scala
@@ -76,7 +76,7 @@ class CSVExprUtilsSuite extends SparkFunSuite {
     // backslash, then tab
     ("""\\t""", Some("""\t"""), None),
     // invalid special character (dot)
-    ("""\.""", None, Some("Unsupported special character for delimiter")),
+    ("""\.""", None, Some("_LEGACY_ERROR_TEMP_3236")),
     // backslash, then dot
     ("""\\.""", Some("""\."""), None),
     // nothing special, just straight conversion
@@ -90,17 +90,16 @@ class CSVExprUtilsSuite extends SparkFunSuite {
   )
 
   test("should correctly produce separator strings, or exceptions, from input") {
-    forAll(testCases) { (input, separatorStr, expectedErrorMsg) =>
+    forAll(testCases) { (input, separatorStr, expectedErrorClass) =>
       try {
         val separator = CSVExprUtils.toDelimiterStr(input)
         assert(separatorStr.isDefined)
-        assert(expectedErrorMsg.isEmpty)
+        assert(expectedErrorClass.isEmpty)
         assert(separator.equals(separatorStr.get))
       } catch {
-        case e: IllegalArgumentException =>
+        case e: SparkIllegalArgumentException =>
           assert(separatorStr.isEmpty)
-          assert(expectedErrorMsg.isDefined)
-          assert(e.getMessage.contains(expectedErrorMsg.get))
+          assert(e.getErrorClass === expectedErrorClass.get)
       }
     }
   }
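
Read in isolation, the migrated assertion pins a stable error class instead of substring-matching rendered message text. A sketch assuming it runs inside a SparkFunSuite, so ScalaTest's intercept and === are in scope; CSVExprUtils.toDelimiterStr is the utility exercised above:

    import org.apache.spark.SparkIllegalArgumentException
    import org.apache.spark.sql.catalyst.csv.CSVExprUtils

    // The invalid escape sequence `\.` now surfaces as a
    // SparkIllegalArgumentException carrying an error class, rather than a
    // bare IllegalArgumentException identified only by its English message.
    val e = intercept[SparkIllegalArgumentException] {
      CSVExprUtils.toDelimiterStr("""\.""")
    }
    assert(e.getErrorClass === "_LEGACY_ERROR_TEMP_3236")

Error classes survive message rewording, which is what makes this form of assertion less brittle.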
CastSuiteBase.scala
@@ -22,7 +22,7 @@ import java.time.{Duration, LocalDate, LocalDateTime, Period}
 import java.time.temporal.ChronoUnit
 import java.util.{Calendar, Locale, TimeZone}
 
-import org.apache.spark.SparkFunSuite
+import org.apache.spark.{SparkFunSuite, SparkIllegalArgumentException}
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
@@ -1105,9 +1105,10 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
 
     Seq("INTERVAL '-178956970-9' YEAR TO MONTH", "INTERVAL '178956970-8' YEAR TO MONTH")
       .foreach { interval =>
-        checkExceptionInExpression[IllegalArgumentException](
+        checkErrorInExpression[SparkIllegalArgumentException](
           cast(Literal.create(interval), YearMonthIntervalType()),
-          "Error parsing interval year-month string: integer overflow")
+          "_LEGACY_ERROR_TEMP_3213",
+          Map("interval" -> "year-month", "msg" -> "integer overflow"))
       }
 
     Seq(Byte.MaxValue, Short.MaxValue, Int.MaxValue, Int.MinValue + 1, Int.MinValue)
@@ -1173,13 +1174,15 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
 
     Seq("INTERVAL '1-1' YEAR", "INTERVAL '1-1' MONTH").foreach { interval =>
       val dataType = YearMonthIntervalType()
-      val expectedMsg = s"Interval string does not match year-month format of " +
-        s"${IntervalUtils.supportedFormat((dataType.startField, dataType.endField))
-          .map(format => s"`$format`").mkString(", ")} " +
-        s"when cast to ${dataType.typeName}: $interval"
-      checkExceptionInExpression[IllegalArgumentException](
+      checkErrorInExpression[SparkIllegalArgumentException](
         cast(Literal.create(interval), dataType),
-        expectedMsg
+        "_LEGACY_ERROR_TEMP_3214",
+        Map(
+          "fallBackNotice" -> "",
+          "typeName" -> "interval year to month",
+          "intervalStr" -> "year-month",
+          "supportedFormat" -> "`[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR`",
+          "input" -> interval)
       )
     }
     Seq(("1", YearMonthIntervalType(YEAR, MONTH)),
@@ -1193,13 +1196,17 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
       ("INTERVAL '1' MONTH", YearMonthIntervalType(YEAR)),
       ("INTERVAL '1' MONTH", YearMonthIntervalType(YEAR, MONTH)))
       .foreach { case (interval, dataType) =>
-        val expectedMsg = s"Interval string does not match year-month format of " +
-          s"${IntervalUtils.supportedFormat((dataType.startField, dataType.endField))
-            .map(format => s"`$format`").mkString(", ")} " +
-          s"when cast to ${dataType.typeName}: $interval"
-        checkExceptionInExpression[IllegalArgumentException](
+        checkErrorInExpression[SparkIllegalArgumentException](
           cast(Literal.create(interval), dataType),
-          expectedMsg)
+          "_LEGACY_ERROR_TEMP_3214",
+          Map(
+            "fallBackNotice" -> "",
+            "typeName" -> dataType.typeName,
+            "intervalStr" -> "year-month",
+            "supportedFormat" ->
+              IntervalUtils.supportedFormat((dataType.startField, dataType.endField))
+                .map(format => s"`$format`").mkString(", "),
+            "input" -> interval))
       }
   }
 
@@ -1313,15 +1320,17 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
       ("1.23", DayTimeIntervalType(MINUTE)),
       ("1.23", DayTimeIntervalType(MINUTE)))
       .foreach { case (interval, dataType) =>
-        val expectedMsg = s"Interval string does not match day-time format of " +
-          s"${IntervalUtils.supportedFormat((dataType.startField, dataType.endField))
-            .map(format => s"`$format`").mkString(", ")} " +
-          s"when cast to ${dataType.typeName}: $interval, " +
-          s"set ${SQLConf.LEGACY_FROM_DAYTIME_STRING.key} to true " +
-          "to restore the behavior before Spark 3.0."
-        checkExceptionInExpression[IllegalArgumentException](
+        checkErrorInExpression[SparkIllegalArgumentException](
           cast(Literal.create(interval), dataType),
-          expectedMsg
+          "_LEGACY_ERROR_TEMP_3214",
+          Map("fallBackNotice" -> (", set spark.sql.legacy.fromDayTimeString.enabled" +
+            " to true to restore the behavior before Spark 3.0."),
+            "intervalStr" -> "day-time",
+            "typeName" -> dataType.typeName,
+            "input" -> interval,
+            "supportedFormat" ->
+              IntervalUtils.supportedFormat((dataType.startField, dataType.endField))
+                .map(format => s"`$format`").mkString(", "))
         )
       }
 
@@ -1337,15 +1346,17 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
       ("INTERVAL '1537228672801:54.7757' MINUTE TO SECOND", DayTimeIntervalType(MINUTE, SECOND)),
       ("INTERVAL '92233720368541.775807' SECOND", DayTimeIntervalType(SECOND)))
       .foreach { case (interval, dataType) =>
-        val expectedMsg = "Interval string does not match day-time format of " +
-          s"${IntervalUtils.supportedFormat((dataType.startField, dataType.endField))
-            .map(format => s"`$format`").mkString(", ")} " +
-          s"when cast to ${dataType.typeName}: $interval, " +
-          s"set ${SQLConf.LEGACY_FROM_DAYTIME_STRING.key} to true " +
-          "to restore the behavior before Spark 3.0."
-        checkExceptionInExpression[IllegalArgumentException](
+        checkErrorInExpression[SparkIllegalArgumentException](
           cast(Literal.create(interval), dataType),
-          expectedMsg)
+          "_LEGACY_ERROR_TEMP_3214",
+          Map("fallBackNotice" -> (", set spark.sql.legacy.fromDayTimeString.enabled" +
+            " to true to restore the behavior before Spark 3.0."),
+            "intervalStr" -> "day-time",
+            "typeName" -> dataType.typeName,
+            "input" -> interval,
+            "supportedFormat" ->
+              IntervalUtils.supportedFormat((dataType.startField, dataType.endField))
+                .map(format => s"`$format`").mkString(", ")))
       }
   }
 
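
Lining up each deleted expectedMsg with the parameter map that replaces it suggests how the _LEGACY_ERROR_TEMP_3214 message is assembled. The authoritative template lives in Spark's error-classes.json; the re-rendering below is only a reading aid:

    // A hedged reconstruction of the old message from the new parameters;
    // illustrative only, not Spark's actual formatting code.
    def renderLegacy3214(params: Map[String, String]): String =
      s"Interval string does not match ${params("intervalStr")} format of " +
        s"${params("supportedFormat")} when cast to ${params("typeName")}: " +
        s"${params("input")}${params("fallBackNotice")}"

It also explains the split in the test data: the year-month cases pass an empty fallBackNotice, while the day-time cases append the spark.sql.legacy.fromDayTimeString.enabled notice previously built from SQLConf.LEGACY_FROM_DAYTIME_STRING.key.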
DateExpressionsSuite.scala
@@ -28,7 +28,7 @@ import scala.language.postfixOps
 import scala.reflect.ClassTag
 import scala.util.Random
 
-import org.apache.spark.{SparkArithmeticException, SparkDateTimeException, SparkException, SparkFunSuite, SparkUpgradeException}
+import org.apache.spark.{SparkArithmeticException, SparkDateTimeException, SparkException, SparkFunSuite, SparkIllegalArgumentException, SparkUpgradeException}
 import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
 import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection
 import org.apache.spark.sql.catalyst.util.{DateTimeUtils, IntervalUtils, TimestampFormatter}
@@ -434,9 +434,12 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
 
     withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
-      checkExceptionInExpression[IllegalArgumentException](
+      checkErrorInExpression[SparkIllegalArgumentException](
         DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
-        "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date")
+        "_LEGACY_ERROR_TEMP_2000",
+        Map("message" ->
+          "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
+          "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
     }
 
     withSQLConf((SQLConf.ANSI_ENABLED.key, "false")) {
@@ -1499,7 +1502,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
 
     Seq('q', 'Q', 'e', 'c', 'A', 'n', 'N', 'p').foreach { l =>
-      checkException[IllegalArgumentException](l.toString)
+      checkException[SparkIllegalArgumentException](l.toString)
     }
   }
 
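
Every migrated assertion in these suites ultimately inspects the same two SparkThrowable accessors. A self-contained sketch; the exception is constructed directly for illustration, and the constructor parameter names are assumed from the SparkIllegalArgumentException API:

    import org.apache.spark.{SparkIllegalArgumentException, SparkThrowable}

    val e: SparkThrowable = new SparkIllegalArgumentException(
      errorClass = "_LEGACY_ERROR_TEMP_2000",
      messageParameters = Map(
        "message" -> "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
        "ansiConfig" -> "\"spark.sql.ansi.enabled\""))

    // Both accessors are defined on SparkThrowable, so tests can assert on the
    // error class and its parameters without matching rendered message text.
    assert(e.getErrorClass == "_LEGACY_ERROR_TEMP_2000")
    assert(e.getMessageParameters.get("ansiConfig") == "\"spark.sql.ansi.enabled\"")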
ExpressionEvalHelper.scala
@@ -154,6 +154,13 @@ trait ExpressionEvalHelper extends ScalaCheckDrivenPropertyChecks with PlanTestBase {
     checkErrorInExpression[T](expression, InternalRow.empty, errorClass, parameters)
   }
 
+  protected def checkErrorInExpression[T <: SparkThrowable : ClassTag](
+      expression: => Expression,
+      inputRow: InternalRow,
+      errorClass: String): Unit = {
+    checkErrorInExpression[T](expression, inputRow, errorClass, Map.empty[String, String])
+  }
+
   protected def checkErrorInExpression[T <: SparkThrowable : ClassTag](
       expression: => Expression,
       inputRow: InternalRow,
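
The new three-argument overload is a convenience for error classes that carry no message parameters. For a hypothetical expression expr and a placeholder error class, the two calls below are equivalent; the second is what the overload delegates to:

    // Shorthand added by this commit (no parameters map):
    checkErrorInExpression[SparkIllegalArgumentException](
      expr, InternalRow.empty, "SOME_ERROR_CLASS")

    // Equivalent explicit form using the pre-existing four-argument overload:
    checkErrorInExpression[SparkIllegalArgumentException](
      expr, InternalRow.empty, "SOME_ERROR_CLASS", Map.empty[String, String])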
