Commit

Add tests and conversions

MaxGekk committed Apr 15, 2021
1 parent ba92de0 commit 57d49b4

Showing 2 changed files with 33 additions and 1 deletion.

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -43,7 +43,8 @@ import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, Scala
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.util._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils.instantToMicros
-import org.apache.spark.sql.catalyst.util.IntervalUtils.{durationToMicros, periodToMonths}
+import org.apache.spark.sql.catalyst.util.IntervalStringStyles.ANSI_STYLE
+import org.apache.spark.sql.catalyst.util.IntervalUtils.{durationToMicros, periodToMonths, toDayTimeIntervalString, toYearMonthIntervalString}
 import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
@@ -317,6 +318,8 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression {
           DateFormatter(timeZoneId).format(value.asInstanceOf[Int])
         case TimestampType =>
           TimestampFormatter.getFractionFormatter(timeZoneId).format(value.asInstanceOf[Long])
+        case DayTimeIntervalType => toDayTimeIntervalString(value.asInstanceOf[Long], ANSI_STYLE)
+        case YearMonthIntervalType => toYearMonthIntervalString(value.asInstanceOf[Int], ANSI_STYLE)
         case _ =>
           other.toString
       }
@@ -437,6 +440,8 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression {
     case (i: CalendarInterval, CalendarIntervalType) =>
       s"INTERVAL '${i.toString}'"
     case (v: Array[Byte], BinaryType) => s"X'${DatatypeConverter.printHexBinary(v)}'"
+    case (i: Long, DayTimeIntervalType) => toDayTimeIntervalString(i, ANSI_STYLE)
+    case (i: Int, YearMonthIntervalType) => toYearMonthIntervalString(i, ANSI_STYLE)
     case _ => value.toString
   }
 }
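
For context, a minimal sketch of what the change above is expected to produce from a Spark build that includes this commit; the output strings are taken from the tests in the second file below, not re-derived here:

import java.time.{Duration, Period}
import org.apache.spark.sql.catalyst.expressions.Literal

// Literal(Duration) creates a DayTimeIntervalType literal backed by microseconds;
// with this change both sql and toString render it in the ANSI interval style.
val dayTime = Literal(Duration.ofDays(-1))
println(dayTime.sql)      // INTERVAL '-1 00:00:00' DAY TO SECOND
println(dayTime.toString) // INTERVAL '-1 00:00:00' DAY TO SECOND

// Literal(Period) creates a YearMonthIntervalType literal backed by months.
val yearMonth = Literal(Period.ofYears(-1))
println(yearMonth.sql)    // INTERVAL '-1-0' YEAR TO MONTH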

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalyst.expressions
 
 import java.nio.charset.StandardCharsets
 import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period, ZoneOffset}
+import java.time.temporal.ChronoUnit
 import java.util.TimeZone
 
 import scala.reflect.runtime.universe.TypeTag
@@ -385,4 +386,30 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     val period1 = Period.ofMonths(-1024)
     checkEvaluation(Literal(Array(period0, period1)), Array(period0, period1))
   }
+
+  test("SPARK-35099: convert a literal of day-time interval to SQL string") {
+    Seq(
+      Duration.ofDays(-1) -> "-1 00:00:00",
+      Duration.of(10, ChronoUnit.MICROS) -> "0 00:00:00.00001",
+      Duration.of(MICROS_PER_DAY - 1, ChronoUnit.MICROS) -> "0 23:59:59.999999"
+    ).foreach { case (duration, intervalPayload) =>
+      val literal = Literal.apply(duration)
+      val expected = s"INTERVAL '$intervalPayload' DAY TO SECOND"
+      assert(literal.sql === expected)
+      assert(literal.toString === expected)
+    }
+  }
+
+  test("SPARK-35099: convert a literal of year-month interval to SQL string") {
+    Seq(
+      Period.ofYears(-1) -> "-1-0",
+      Period.of(9999, 11, 0) -> "9999-11",
+      Period.ofMonths(-11) -> "-0-11"
+    ).foreach { case (period, intervalPayload) =>
+      val literal = Literal.apply(period)
+      val expected = s"INTERVAL '$intervalPayload' YEAR TO MONTH"
+      assert(literal.sql === expected)
+      assert(literal.toString === expected)
+    }
+  }
 }
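
Roughly, the conversions these tests exercise reduce to the two IntervalUtils helpers imported in literals.scala above. A small sketch of that mapping follows; MICROS_PER_DAY is assumed to come from org.apache.spark.sql.catalyst.util.DateTimeConstants, since the suite uses it without showing its import here:

import org.apache.spark.sql.catalyst.util.DateTimeConstants.MICROS_PER_DAY
import org.apache.spark.sql.catalyst.util.IntervalStringStyles.ANSI_STYLE
import org.apache.spark.sql.catalyst.util.IntervalUtils.{toDayTimeIntervalString, toYearMonthIntervalString}

// A day-time interval is a microsecond count: Duration.ofDays(-1) maps to -MICROS_PER_DAY.
println(toDayTimeIntervalString(-MICROS_PER_DAY, ANSI_STYLE))  // INTERVAL '-1 00:00:00' DAY TO SECOND

// A year-month interval is a month count: Period.ofYears(-1) maps to -12 months.
println(toYearMonthIntervalString(-12, ANSI_STYLE))            // INTERVAL '-1-0' YEAR TO MONTH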
