diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 2ea73e83c7439..47eebc574586e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -43,7 +43,8 @@ import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, Scala
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.util._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils.instantToMicros
-import org.apache.spark.sql.catalyst.util.IntervalUtils.{durationToMicros, periodToMonths}
+import org.apache.spark.sql.catalyst.util.IntervalStringStyles.ANSI_STYLE
+import org.apache.spark.sql.catalyst.util.IntervalUtils.{durationToMicros, periodToMonths, toDayTimeIntervalString, toYearMonthIntervalString}
 import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
@@ -317,6 +318,8 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression {
         DateFormatter(timeZoneId).format(value.asInstanceOf[Int])
       case TimestampType =>
         TimestampFormatter.getFractionFormatter(timeZoneId).format(value.asInstanceOf[Long])
+      case DayTimeIntervalType => toDayTimeIntervalString(value.asInstanceOf[Long], ANSI_STYLE)
+      case YearMonthIntervalType => toYearMonthIntervalString(value.asInstanceOf[Int], ANSI_STYLE)
       case _ => other.toString
     }
   }
@@ -437,6 +440,8 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression {
     case (i: CalendarInterval, CalendarIntervalType) =>
       s"INTERVAL '${i.toString}'"
     case (v: Array[Byte], BinaryType) => s"X'${DatatypeConverter.printHexBinary(v)}'"
+    case (i: Long, DayTimeIntervalType) => toDayTimeIntervalString(i, ANSI_STYLE)
+    case (i: Int, YearMonthIntervalType) => toYearMonthIntervalString(i, ANSI_STYLE)
     case _ => value.toString
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
index f8766f3fd27a5..a5f70fd172b08 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalyst.expressions
 
 import java.nio.charset.StandardCharsets
 import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period, ZoneOffset}
+import java.time.temporal.ChronoUnit
 import java.util.TimeZone
 
 import scala.reflect.runtime.universe.TypeTag
@@ -385,4 +386,30 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     val period1 = Period.ofMonths(-1024)
     checkEvaluation(Literal(Array(period0, period1)), Array(period0, period1))
   }
+
+  test("SPARK-35099: convert a literal of day-time interval to SQL string") {
+    Seq(
+      Duration.ofDays(-1) -> "-1 00:00:00",
+      Duration.of(10, ChronoUnit.MICROS) -> "0 00:00:00.00001",
+      Duration.of(MICROS_PER_DAY - 1, ChronoUnit.MICROS) -> "0 23:59:59.999999"
+    ).foreach { case (duration, intervalPayload) =>
+      val literal = Literal.apply(duration)
+      val expected = s"INTERVAL '$intervalPayload' DAY TO SECOND"
+      assert(literal.sql === expected)
+      assert(literal.toString === expected)
+    }
+  }
+
+  test("SPARK-35099: convert a literal of year-month interval to SQL string") {
+    Seq(
+      Period.ofYears(-1) -> "-1-0",
+      Period.of(9999, 11, 0) -> "9999-11",
+      Period.ofMonths(-11) -> "-0-11"
+    ).foreach { case (period, intervalPayload) =>
+      val literal = Literal.apply(period)
+      val expected = s"INTERVAL '$intervalPayload' YEAR TO MONTH"
+      assert(literal.sql === expected)
+      assert(literal.toString === expected)
+    }
+  }
 }