From 447e17bed3122e4100b33898bd002ea1a0d2b84e Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Wed, 14 Apr 2021 22:35:02 +0300 Subject: [PATCH 01/20] Change the parsing method --- .../spark/sql/catalyst/parser/AstBuilder.scala | 17 +++++++++++++++-- .../spark/sql/errors/QueryParsingErrors.scala | 7 +++++++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index c7af21cabfe8f..ff090271d48c0 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -18,6 +18,7 @@ package org.apache.spark.sql.catalyst.parser import java.util.Locale +import java.util.concurrent.TimeUnit import javax.xml.bind.DatatypeConverter import scala.collection.JavaConverters._ @@ -2302,12 +2303,24 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg } /** - * Create a [[CalendarInterval]] literal expression. Two syntaxes are supported: + * Create a [[CalendarInterval]] or ANSI interval literal expression. + * Two syntaxes are supported: * - multiple unit value pairs, for instance: interval 2 months 2 days. * - from-to unit, for instance: interval '1-2' year to month. */ override def visitInterval(ctx: IntervalContext): Literal = withOrigin(ctx) { - Literal(parseIntervalLiteral(ctx), CalendarIntervalType) + val parsedInterval = parseIntervalLiteral(ctx) + if (SQLConf.get.legacyIntervalEnabled) { + Literal(parsedInterval, CalendarIntervalType) + } else if (parsedInterval.months != 0) { + if (parsedInterval.days != 0 || parsedInterval.microseconds != 0) { + throw QueryParsingErrors.mixedIntervalError(ctx) + } + Literal(parsedInterval.months, YearMonthIntervalType) + } else { + val micros = IntervalUtils.getDuration(parsedInterval, TimeUnit.MICROSECONDS) + Literal(micros, DayTimeIntervalType) + } } /** diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala index d97b19954f63e..d5e619e8b11e9 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala @@ -22,6 +22,7 @@ import org.antlr.v4.runtime.ParserRuleContext import org.apache.spark.sql.catalyst.parser.ParseException import org.apache.spark.sql.catalyst.parser.SqlBaseParser._ import org.apache.spark.sql.catalyst.trees.Origin +import org.apache.spark.sql.internal.SQLConf /** * Object for grouping all error messages of the query parsing. @@ -367,4 +368,10 @@ object QueryParsingErrors { new ParseException("LOCAL is supported only with file: scheme", ctx) } + def mixedIntervalError(ctx: IntervalContext): Throwable = { + new ParseException( + "Mixing of year-month and day-time fields is not allowed. 
" + + s"Set '${SQLConf.LEGACY_INTERVAL_ENABLED.key}' to true to enable the legacy interval type " + + "which supports mixed fields.", ctx) + } } From 8cab0f046320d6963353ec7a88da8ab5dbcd9091 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Wed, 14 Apr 2021 22:41:38 +0300 Subject: [PATCH 02/20] Re-gen sql.out --- .../sql-tests/results/ansi/datetime.sql.out | 20 +- .../sql-tests/results/ansi/interval.sql.out | 191 +++++++++------- .../sql-tests/results/datetime.sql.out | 20 +- .../sql-tests/results/interval.sql.out | 204 +++++++++++------- .../results/postgreSQL/interval.sql.out | 56 ++--- 5 files changed, 286 insertions(+), 205 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out index 4455b8a4ee3a1..b82ca536ff742 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out @@ -291,7 +291,7 @@ select timestamp '2019-01-01中文' -- !query select timestamp'2011-11-11 11:11:11' + interval '2' day -- !query schema -struct +struct -- !query output 2011-11-13 11:11:11 @@ -299,7 +299,7 @@ struct -- !query select timestamp'2011-11-11 11:11:11' - interval '2' day -- !query schema -struct +struct -- !query output 2011-11-09 11:11:11 @@ -307,25 +307,23 @@ struct -- !query select date'2011-11-11 11:11:11' + interval '2' second -- !query schema -struct<> +struct -- !query output -java.lang.IllegalArgumentException -requirement failed: Cannot add hours, minutes or seconds, milliseconds, microseconds to a date +2011-11-11 00:00:02 -- !query select date'2011-11-11 11:11:11' - interval '2' second -- !query schema -struct<> +struct -- !query output -java.lang.IllegalArgumentException -requirement failed: Cannot add hours, minutes or seconds, milliseconds, microseconds to a date +2011-11-10 23:59:58 -- !query select '2011-11-11' - interval '2' day -- !query schema -struct<2011-11-11 - INTERVAL '2 days':string> +struct<2011-11-11 - 172800000000:string> -- !query output 2011-11-09 00:00:00 @@ -333,7 +331,7 @@ struct<2011-11-11 - INTERVAL '2 days':string> -- !query select '2011-11-11 11:11:11' - interval '2' second -- !query schema -struct<2011-11-11 11:11:11 - INTERVAL '2 seconds':string> +struct<2011-11-11 11:11:11 - 2000000:string> -- !query output 2011-11-11 11:11:09 @@ -353,7 +351,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 +cannot resolve '1 + (- 2000000)' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index 324d1510a4862..60736474a94af 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -13,9 +13,15 @@ struct +struct<> -- !query output -6 months 21 days 0.000005 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. 
Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval 4 month 2 weeks 3 microseconds * 1.5 +-------^^^ -- !query @@ -41,7 +47,7 @@ select interval 2147483647 month / 0.5 struct<> -- !query output java.lang.ArithmeticException -integer overflow +Overflow -- !query @@ -50,7 +56,7 @@ select interval 2147483647 day * 2 struct<> -- !query output java.lang.ArithmeticException -integer overflow +long overflow -- !query @@ -59,7 +65,7 @@ select interval 2147483647 day / 0.5 struct<> -- !query output java.lang.ArithmeticException -integer overflow +long overflow -- !query @@ -106,9 +112,15 @@ struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval> -- !query select -interval -1 month 1 day -1 second -- !query schema -struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval> +struct<> -- !query output -1 months -1 days 1 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 8) + +== SQL == +select -interval -1 month 1 day -1 second +--------^^^ -- !query @@ -122,57 +134,63 @@ struct<(+ INTERVAL '-1 months 1 days -1 seconds'):interval> -- !query select +interval -1 month 1 day -1 second -- !query schema -struct<(+ INTERVAL '-1 months 1 days -1 seconds'):interval> +struct<> -- !query output --1 months 1 days -1 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 8) + +== SQL == +select +interval -1 month 1 day -1 second +--------^^^ -- !query select interval -'1-1' year to month -- !query schema -struct +struct<-13:year-month interval> -- !query output --1 years -1 months +-1-1 -- !query select interval -'-1-1' year to month -- !query schema -struct +struct<13:year-month interval> -- !query output -1 years 1 months +1-1 -- !query select interval +'-1-1' year to month -- !query schema -struct +struct<-13:year-month interval> -- !query output --1 years -1 months +-1-1 -- !query select interval - '1 2:3:4.001' day to second -- !query schema -struct +struct<-93784001000:day-time interval> -- !query output --1 days -2 hours -3 minutes -4.001 seconds +-1 02:03:04.001000000 -- !query select interval +'1 2:3:4.001' day to second -- !query schema -struct +struct<93784001000:day-time interval> -- !query output -1 days 2 hours 3 minutes 4.001 seconds +1 02:03:04.001000000 -- !query select interval -'-1 2:3:4.001' day to second -- !query schema -struct +struct<93784001000:day-time interval> -- !query output -1 days 2 hours 3 minutes 4.001 seconds +1 02:03:04.001000000 -- !query @@ -307,113 +325,125 @@ NULL -- !query select interval 13.123456789 seconds, interval -13.123456789 second -- !query schema -struct +struct<13123456:day-time interval,-13123456:day-time interval> -- !query output -13.123456 seconds -13.123456 seconds +0 00:00:13.123456000 -0 00:00:13.123456000 -- !query select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond -- !query schema -struct +struct<> -- !query output -1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. 
Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond +-------^^^ -- !query select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second -- !query schema -struct +struct<> -- !query output -32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second +-------^^^ -- !query select interval '0 0:0:0.1' day to second -- !query schema -struct +struct<100000:day-time interval> -- !query output -0.1 seconds +0 00:00:00.100000000 -- !query select interval '10-9' year to month -- !query schema -struct +struct<129:year-month interval> -- !query output -10 years 9 months +10-9 -- !query select interval '20 15' day to hour -- !query schema -struct +struct<1782000000000:day-time interval> -- !query output -20 days 15 hours +20 15:00:00.000000000 -- !query select interval '20 15:40' day to minute -- !query schema -struct +struct<1784400000000:day-time interval> -- !query output -20 days 15 hours 40 minutes +20 15:40:00.000000000 -- !query select interval '20 15:40:32.99899999' day to second -- !query schema -struct +struct<1784432998999:day-time interval> -- !query output -20 days 15 hours 40 minutes 32.998999 seconds +20 15:40:32.998999000 -- !query select interval '15:40' hour to minute -- !query schema -struct +struct<56400000000:day-time interval> -- !query output -15 hours 40 minutes +0 15:40:00.000000000 -- !query select interval '15:40:32.99899999' hour to second -- !query schema -struct +struct<56432998999:day-time interval> -- !query output -15 hours 40 minutes 32.998999 seconds +0 15:40:32.998999000 -- !query select interval '40:32.99899999' minute to second -- !query schema -struct +struct<2432998999:day-time interval> -- !query output -40 minutes 32.998999 seconds +0 00:40:32.998999000 -- !query select interval '40:32' minute to second -- !query schema -struct +struct<2432000000:day-time interval> -- !query output -40 minutes 32 seconds +0 00:40:32.000000000 -- !query select interval 30 day day -- !query schema -struct +struct -- !query output -30 days +30 00:00:00.000000000 -- !query select interval 30 days days -- !query schema -struct +struct -- !query output -30 days +30 00:00:00.000000000 -- !query @@ -517,9 +547,9 @@ select interval 10 nanoseconds -- !query select map(1, interval 1 day, 2, interval 3 week) -- !query schema -struct> +struct> -- !query output -{1:1 days,2:21 days} +{1:1 00:00:00.000000000,2:21 00:00:00.000000000} -- !query @@ -786,7 +816,7 @@ select interval '2-2' year to month + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 @@ -802,7 +832,7 @@ select interval '2-2' year to month + tsval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 @@ -813,9 +843,9 @@ select interval '2-2' year to month - interval '3-3' year to month from 
interval_arithmetic -- !query schema -struct<(INTERVAL '2 years 2 months' + INTERVAL '3 years 3 months'):interval,(INTERVAL '2 years 2 months' - INTERVAL '3 years 3 months'):interval> +struct<(26 + 39):year-month interval,(26 - 39):year-month interval> -- !query output -5 years 5 months -1 years -1 months +5-5 -1-1 -- !query @@ -829,10 +859,9 @@ select interval '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query schema -struct<> +struct -- !query output -java.lang.IllegalArgumentException -requirement failed: Cannot add hours, minutes or seconds, milliseconds, microseconds to a date +2012-01-01 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 -- !query @@ -846,7 +875,7 @@ select interval '99 11:22:33.123456789' day to second + tsval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 00:00:00 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -862,7 +891,7 @@ select interval '99 11:22:33.123456789' day to second + strval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -873,9 +902,9 @@ select interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second from interval_arithmetic -- !query schema -struct<(INTERVAL '99 days 11 hours 22 minutes 33.123456 seconds' + INTERVAL '10 days 9 hours 8 minutes 7.123456 seconds'):interval,(INTERVAL '99 days 11 hours 22 minutes 33.123456 seconds' - INTERVAL '10 days 9 hours 8 minutes 7.123456 seconds'):interval> +struct<(8594553123456 + 896887123456):day-time interval,(8594553123456 - 896887123456):day-time interval> -- !query output -109 days 20 hours 30 minutes 40.246912 seconds 89 days 2 hours 14 minutes 26 seconds +109 20:30:40.246912000 89 02:14:26.000000000 -- !query @@ -905,25 +934,25 @@ struct -- !query select interval '1\t' day -- !query schema -struct +struct<86400000000:day-time interval> -- !query output -1 days +1 00:00:00.000000000 -- !query select interval '1 ' day -- !query schema -struct +struct<86400000000:day-time interval> -- !query output -1 days +1 00:00:00.000000000 -- !query select interval '2-2\t' year to month -- !query schema -struct +struct<26:year-month interval> -- !query output -2 years 2 months +2-2 -- !query @@ -943,9 +972,9 @@ select interval '-\t2-2\t' year to month -- !query select interval '\n0 12:34:46.789\t' day to second -- !query schema -struct +struct<45286789000:day-time interval> -- !query output -12 hours 34 minutes 46.789 seconds +0 12:34:46.789000000 -- !query @@ -1057,9 +1086,9 @@ SELECT to_csv(named_struct('a', interval 32 month, 'b', interval 70 minute)), from_csv(to_csv(named_struct('a', interval 32 month, 'b', interval 70 minute)), 'a interval, b interval') -- !query schema -struct,to_csv(from_csv(1, 1 day)):string,to_csv(named_struct(a, INTERVAL '2 years 8 months', b, INTERVAL '1 hours 10 minutes')):string,from_csv(to_csv(named_struct(a, INTERVAL '2 years 8 months', b, INTERVAL '1 hours 10 minutes'))):struct> +struct,to_csv(from_csv(1, 1 day)):string,to_csv(named_struct(a, 32, b, 4200000000)):string,from_csv(to_csv(named_struct(a, 32, b, 4200000000))):struct> -- !query output 
-{"a":1,"b":1 days} 1,1 days 2 years 8 months,1 hours 10 minutes {"a":2 years 8 months,"b":1 hours 10 minutes} +{"a":1,"b":1 days} 1,1 days 32,4200000000 {"a":null,"b":null} -- !query @@ -1069,9 +1098,19 @@ SELECT to_json(map('a', interval 25 month 100 day 130 minute)), from_json(to_json(map('a', interval 25 month 100 day 130 minute)), 'a interval') -- !query schema -struct,to_json(from_json({"a":"1 days"})):string,to_json(map(a, INTERVAL '2 years 1 months 100 days 2 hours 10 minutes')):string,from_json(to_json(map(a, INTERVAL '2 years 1 months 100 days 2 hours 10 minutes'))):struct> +struct<> -- !query output -{"a":1 days} {"a":"1 days"} {"a":"2 years 1 months 100 days 2 hours 10 minutes"} {"a":2 years 1 months 100 days 2 hours 10 minutes} +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 4, pos 19) + +== SQL == +SELECT + from_json('{"a":"1 days"}', 'a interval'), + to_json(from_json('{"a":"1 days"}', 'a interval')), + to_json(map('a', interval 25 month 100 day 130 minute)), +-------------------^^^ + from_json(to_json(map('a', interval 25 month 100 day 130 minute)), 'a interval') -- !query @@ -1203,6 +1242,6 @@ select interval 'interval 1' day -- !query select interval '-\t 1' day -- !query schema -struct +struct<-86400000000:day-time interval> -- !query output --1 days +-1 00:00:00.000000000 diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out index e93f0c8439efb..003c1706874fb 100755 --- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out @@ -271,7 +271,7 @@ select timestamp '2019-01-01中文' -- !query select timestamp'2011-11-11 11:11:11' + interval '2' day -- !query schema -struct +struct -- !query output 2011-11-13 11:11:11 @@ -279,7 +279,7 @@ struct -- !query select timestamp'2011-11-11 11:11:11' - interval '2' day -- !query schema -struct +struct -- !query output 2011-11-09 11:11:11 @@ -287,23 +287,23 @@ struct -- !query select date'2011-11-11 11:11:11' + interval '2' second -- !query schema -struct +struct -- !query output -2011-11-11 +2011-11-11 00:00:02 -- !query select date'2011-11-11 11:11:11' - interval '2' second -- !query schema -struct +struct -- !query output -2011-11-10 +2011-11-10 23:59:58 -- !query select '2011-11-11' - interval '2' day -- !query schema -struct<2011-11-11 - INTERVAL '2 days':string> +struct<2011-11-11 - 172800000000:string> -- !query output 2011-11-09 00:00:00 @@ -311,7 +311,7 @@ struct<2011-11-11 - INTERVAL '2 days':string> -- !query select '2011-11-11 11:11:11' - interval '2' second -- !query schema -struct<2011-11-11 11:11:11 - INTERVAL '2 seconds':string> +struct<2011-11-11 11:11:11 - 2000000:string> -- !query output 2011-11-11 11:11:09 @@ -319,7 +319,7 @@ struct<2011-11-11 11:11:11 - INTERVAL '2 seconds':string> -- !query select '1' - interval '2' second -- !query schema -struct<1 - INTERVAL '2 seconds':string> +struct<1 - 2000000:string> -- !query output NULL @@ -330,7 +330,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 +cannot resolve '1 + (- 2000000)' due to data type mismatch: argument 1 requires 
timestamp type, however, '1' is of int type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index 33a3c63e5e07b..f029803659c6a 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -13,9 +13,15 @@ struct +struct<> -- !query output -6 months 21 days 0.000005 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval 4 month 2 weeks 3 microseconds * 1.5 +-------^^^ -- !query @@ -29,33 +35,37 @@ struct +struct<> -- !query output -178956970 years 7 months +java.lang.ArithmeticException +integer overflow -- !query select interval 2147483647 month / 0.5 -- !query schema -struct +struct<> -- !query output -178956970 years 7 months +java.lang.ArithmeticException +Overflow -- !query select interval 2147483647 day * 2 -- !query schema -struct +struct<> -- !query output -2147483647 days 2562047788 hours 54.775807 seconds +java.lang.ArithmeticException +long overflow -- !query select interval 2147483647 day / 0.5 -- !query schema -struct +struct<> -- !query output -2147483647 days 2562047788 hours 54.775807 seconds +java.lang.ArithmeticException +long overflow -- !query @@ -101,9 +111,15 @@ struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval> -- !query select -interval -1 month 1 day -1 second -- !query schema -struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval> +struct<> -- !query output -1 months -1 days 1 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 8) + +== SQL == +select -interval -1 month 1 day -1 second +--------^^^ -- !query @@ -117,57 +133,63 @@ struct<(+ INTERVAL '-1 months 1 days -1 seconds'):interval> -- !query select +interval -1 month 1 day -1 second -- !query schema -struct<(+ INTERVAL '-1 months 1 days -1 seconds'):interval> +struct<> -- !query output --1 months 1 days -1 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. 
Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 8) + +== SQL == +select +interval -1 month 1 day -1 second +--------^^^ -- !query select interval -'1-1' year to month -- !query schema -struct +struct<-13:year-month interval> -- !query output --1 years -1 months +-1-1 -- !query select interval -'-1-1' year to month -- !query schema -struct +struct<13:year-month interval> -- !query output -1 years 1 months +1-1 -- !query select interval +'-1-1' year to month -- !query schema -struct +struct<-13:year-month interval> -- !query output --1 years -1 months +-1-1 -- !query select interval - '1 2:3:4.001' day to second -- !query schema -struct +struct<-93784001000:day-time interval> -- !query output --1 days -2 hours -3 minutes -4.001 seconds +-1 02:03:04.001000000 -- !query select interval +'1 2:3:4.001' day to second -- !query schema -struct +struct<93784001000:day-time interval> -- !query output -1 days 2 hours 3 minutes 4.001 seconds +1 02:03:04.001000000 -- !query select interval -'-1 2:3:4.001' day to second -- !query schema -struct +struct<93784001000:day-time interval> -- !query output -1 days 2 hours 3 minutes 4.001 seconds +1 02:03:04.001000000 -- !query @@ -301,113 +323,125 @@ NULL -- !query select interval 13.123456789 seconds, interval -13.123456789 second -- !query schema -struct +struct<13123456:day-time interval,-13123456:day-time interval> -- !query output -13.123456 seconds -13.123456 seconds +0 00:00:13.123456000 -0 00:00:13.123456000 -- !query select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond -- !query schema -struct +struct<> -- !query output -1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond +-------^^^ -- !query select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second -- !query schema -struct +struct<> -- !query output -32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. 
Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second +-------^^^ -- !query select interval '0 0:0:0.1' day to second -- !query schema -struct +struct<100000:day-time interval> -- !query output -0.1 seconds +0 00:00:00.100000000 -- !query select interval '10-9' year to month -- !query schema -struct +struct<129:year-month interval> -- !query output -10 years 9 months +10-9 -- !query select interval '20 15' day to hour -- !query schema -struct +struct<1782000000000:day-time interval> -- !query output -20 days 15 hours +20 15:00:00.000000000 -- !query select interval '20 15:40' day to minute -- !query schema -struct +struct<1784400000000:day-time interval> -- !query output -20 days 15 hours 40 minutes +20 15:40:00.000000000 -- !query select interval '20 15:40:32.99899999' day to second -- !query schema -struct +struct<1784432998999:day-time interval> -- !query output -20 days 15 hours 40 minutes 32.998999 seconds +20 15:40:32.998999000 -- !query select interval '15:40' hour to minute -- !query schema -struct +struct<56400000000:day-time interval> -- !query output -15 hours 40 minutes +0 15:40:00.000000000 -- !query select interval '15:40:32.99899999' hour to second -- !query schema -struct +struct<56432998999:day-time interval> -- !query output -15 hours 40 minutes 32.998999 seconds +0 15:40:32.998999000 -- !query select interval '40:32.99899999' minute to second -- !query schema -struct +struct<2432998999:day-time interval> -- !query output -40 minutes 32.998999 seconds +0 00:40:32.998999000 -- !query select interval '40:32' minute to second -- !query schema -struct +struct<2432000000:day-time interval> -- !query output -40 minutes 32 seconds +0 00:40:32.000000000 -- !query select interval 30 day day -- !query schema -struct +struct -- !query output -30 days +30 00:00:00.000000000 -- !query select interval 30 days days -- !query schema -struct +struct -- !query output -30 days +30 00:00:00.000000000 -- !query @@ -511,9 +545,9 @@ select interval 10 nanoseconds -- !query select map(1, interval 1 day, 2, interval 3 week) -- !query schema -struct> +struct> -- !query output -{1:1 days,2:21 days} +{1:1 00:00:00.000000000,2:21 00:00:00.000000000} -- !query @@ -780,7 +814,7 @@ select interval '2-2' year to month + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 @@ -796,7 +830,7 @@ select interval '2-2' year to month + tsval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 @@ -807,9 +841,9 @@ select interval '2-2' year to month - interval '3-3' year to month from interval_arithmetic -- !query schema -struct<(INTERVAL '2 years 2 months' + INTERVAL '3 years 3 months'):interval,(INTERVAL '2 years 2 months' - INTERVAL '3 years 3 months'):interval> +struct<(26 + 39):year-month interval,(26 - 39):year-month interval> -- !query output -5 years 5 months -1 years -1 months +5-5 -1-1 -- !query @@ -823,9 +857,9 @@ select interval '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output -2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 +2012-01-01 2011-09-23 
12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 -- !query @@ -839,7 +873,7 @@ select interval '99 11:22:33.123456789' day to second + tsval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 00:00:00 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -855,7 +889,7 @@ select interval '99 11:22:33.123456789' day to second + strval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -866,9 +900,9 @@ select interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second from interval_arithmetic -- !query schema -struct<(INTERVAL '99 days 11 hours 22 minutes 33.123456 seconds' + INTERVAL '10 days 9 hours 8 minutes 7.123456 seconds'):interval,(INTERVAL '99 days 11 hours 22 minutes 33.123456 seconds' - INTERVAL '10 days 9 hours 8 minutes 7.123456 seconds'):interval> +struct<(8594553123456 + 896887123456):day-time interval,(8594553123456 - 896887123456):day-time interval> -- !query output -109 days 20 hours 30 minutes 40.246912 seconds 89 days 2 hours 14 minutes 26 seconds +109 20:30:40.246912000 89 02:14:26.000000000 -- !query @@ -898,25 +932,25 @@ struct -- !query select interval '1\t' day -- !query schema -struct +struct<86400000000:day-time interval> -- !query output -1 days +1 00:00:00.000000000 -- !query select interval '1 ' day -- !query schema -struct +struct<86400000000:day-time interval> -- !query output -1 days +1 00:00:00.000000000 -- !query select interval '2-2\t' year to month -- !query schema -struct +struct<26:year-month interval> -- !query output -2 years 2 months +2-2 -- !query @@ -936,9 +970,9 @@ select interval '-\t2-2\t' year to month -- !query select interval '\n0 12:34:46.789\t' day to second -- !query schema -struct +struct<45286789000:day-time interval> -- !query output -12 hours 34 minutes 46.789 seconds +0 12:34:46.789000000 -- !query @@ -1045,9 +1079,9 @@ SELECT to_csv(named_struct('a', interval 32 month, 'b', interval 70 minute)), from_csv(to_csv(named_struct('a', interval 32 month, 'b', interval 70 minute)), 'a interval, b interval') -- !query schema -struct,to_csv(from_csv(1, 1 day)):string,to_csv(named_struct(a, INTERVAL '2 years 8 months', b, INTERVAL '1 hours 10 minutes')):string,from_csv(to_csv(named_struct(a, INTERVAL '2 years 8 months', b, INTERVAL '1 hours 10 minutes'))):struct> +struct,to_csv(from_csv(1, 1 day)):string,to_csv(named_struct(a, 32, b, 4200000000)):string,from_csv(to_csv(named_struct(a, 32, b, 4200000000))):struct> -- !query output -{"a":1,"b":1 days} 1,1 days 2 years 8 months,1 hours 10 minutes {"a":2 years 8 months,"b":1 hours 10 minutes} +{"a":1,"b":1 days} 1,1 days 32,4200000000 {"a":null,"b":null} -- !query @@ -1057,9 +1091,19 @@ SELECT to_json(map('a', interval 25 month 100 day 130 minute)), from_json(to_json(map('a', interval 25 month 100 day 130 minute)), 'a interval') -- !query schema -struct,to_json(from_json({"a":"1 days"})):string,to_json(map(a, INTERVAL '2 years 1 months 100 days 2 hours 10 minutes')):string,from_json(to_json(map(a, INTERVAL '2 years 1 months 100 days 2 hours 10 minutes'))):struct> +struct<> -- !query output -{"a":1 days} {"a":"1 days"} {"a":"2 
years 1 months 100 days 2 hours 10 minutes"} {"a":2 years 1 months 100 days 2 hours 10 minutes} +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 4, pos 19) + +== SQL == +SELECT + from_json('{"a":"1 days"}', 'a interval'), + to_json(from_json('{"a":"1 days"}', 'a interval')), + to_json(map('a', interval 25 month 100 day 130 minute)), +-------------------^^^ + from_json(to_json(map('a', interval 25 month 100 day 130 minute)), 'a interval') -- !query @@ -1191,6 +1235,6 @@ select interval 'interval 1' day -- !query select interval '-\t 1' day -- !query schema -struct +struct<-86400000000:day-time interval> -- !query output --1 days +-1 00:00:00.000000000 diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out index 62d47410aab65..df12d2607e3c6 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out @@ -5,97 +5,97 @@ -- !query SELECT interval '999' second -- !query schema -struct +struct<999000000:day-time interval> -- !query output -16 minutes 39 seconds +0 00:16:39.000000000 -- !query SELECT interval '999' minute -- !query schema -struct +struct<59940000000:day-time interval> -- !query output -16 hours 39 minutes +0 16:39:00.000000000 -- !query SELECT interval '999' hour -- !query schema -struct +struct<3596400000000:day-time interval> -- !query output -999 hours +41 15:00:00.000000000 -- !query SELECT interval '999' day -- !query schema -struct +struct<86313600000000:day-time interval> -- !query output -999 days +999 00:00:00.000000000 -- !query SELECT interval '999' month -- !query schema -struct +struct<999:year-month interval> -- !query output -83 years 3 months +83-3 -- !query SELECT interval '1' year -- !query schema -struct +struct<12:year-month interval> -- !query output -1 years +1-0 -- !query SELECT interval '2' month -- !query schema -struct +struct<2:year-month interval> -- !query output -2 months +0-2 -- !query SELECT interval '3' day -- !query schema -struct +struct<259200000000:day-time interval> -- !query output -3 days +3 00:00:00.000000000 -- !query SELECT interval '4' hour -- !query schema -struct +struct<14400000000:day-time interval> -- !query output -4 hours +0 04:00:00.000000000 -- !query SELECT interval '5' minute -- !query schema -struct +struct<300000000:day-time interval> -- !query output -5 minutes +0 00:05:00.000000000 -- !query SELECT interval '6' second -- !query schema -struct +struct<6000000:day-time interval> -- !query output -6 seconds +0 00:00:06.000000000 -- !query SELECT interval '1-2' year to month -- !query schema -struct +struct<14:year-month interval> -- !query output -1 years 2 months +1-2 -- !query @@ -129,9 +129,9 @@ SELECT interval '1 2:03:04' day to hour -- !query SELECT interval '1 2:03' day to minute -- !query schema -struct +struct<93780000000:day-time interval> -- !query output -1 days 2 hours 3 minutes +1 02:03:00.000000000 -- !query @@ -165,9 +165,9 @@ SELECT interval '1 2:03' day to second -- !query SELECT interval '1 2:03:04' day to second -- !query schema -struct +struct<93784000000:day-time interval> -- !query output -1 days 2 hours 3 minutes 4 seconds +1 02:03:04.000000000 -- !query From 0d24c9b99482b509dcce7b75d21f057e54315740 Mon Sep 17 
00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 09:15:30 +0300 Subject: [PATCH 03/20] Re-gen sql.out --- .../resources/sql-tests/results/cast.sql.out | 10 +- .../sql-tests/results/datetime-legacy.sql.out | 20 +- .../sql-tests/results/extract.sql.out | 486 +++++++++++------- .../native/dateTimeOperations.sql.out | 84 +-- .../sql-tests/results/udf/udf-window.sql.out | 13 +- .../sql-tests/results/window.sql.out | 13 +- 6 files changed, 359 insertions(+), 267 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out index 42d12b80be989..a2646eb6a7095 100644 --- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out @@ -286,9 +286,15 @@ struct -- !query SELECT CAST(interval 3 month 1 hour AS string) -- !query schema -struct +struct<> -- !query output -3 months 1 hours +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 12) + +== SQL == +SELECT CAST(interval 3 month 1 hour AS string) +------------^^^ -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index ad312b20a9f15..2e9540521e485 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -271,7 +271,7 @@ select timestamp '2019-01-01中文' -- !query select timestamp'2011-11-11 11:11:11' + interval '2' day -- !query schema -struct +struct -- !query output 2011-11-13 11:11:11 @@ -279,7 +279,7 @@ struct -- !query select timestamp'2011-11-11 11:11:11' - interval '2' day -- !query schema -struct +struct -- !query output 2011-11-09 11:11:11 @@ -287,23 +287,23 @@ struct -- !query select date'2011-11-11 11:11:11' + interval '2' second -- !query schema -struct +struct -- !query output -2011-11-11 +2011-11-11 00:00:02 -- !query select date'2011-11-11 11:11:11' - interval '2' second -- !query schema -struct +struct -- !query output -2011-11-10 +2011-11-10 23:59:58 -- !query select '2011-11-11' - interval '2' day -- !query schema -struct<2011-11-11 - INTERVAL '2 days':string> +struct<2011-11-11 - 172800000000:string> -- !query output 2011-11-09 00:00:00 @@ -311,7 +311,7 @@ struct<2011-11-11 - INTERVAL '2 days':string> -- !query select '2011-11-11 11:11:11' - interval '2' second -- !query schema -struct<2011-11-11 11:11:11 - INTERVAL '2 seconds':string> +struct<2011-11-11 11:11:11 - 2000000:string> -- !query output 2011-11-11 11:11:09 @@ -319,7 +319,7 @@ struct<2011-11-11 11:11:11 - INTERVAL '2 seconds':string> -- !query select '1' - interval '2' second -- !query schema -struct<1 - INTERVAL '2 seconds':string> +struct<1 - 2000000:string> -- !query output NULL @@ -330,7 +330,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 +cannot resolve '1 + (- 2000000)' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/extract.sql.out 
b/sql/core/src/test/resources/sql-tests/results/extract.sql.out index 35cfda1767c2a..3d702e1e3ca2a 100644 --- a/sql/core/src/test/resources/sql-tests/results/extract.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/extract.sql.out @@ -7,311 +7,355 @@ CREATE TEMPORARY VIEW t AS select '2011-05-06 07:08:09.1234567' as c, interval 1 -- !query schema struct<> -- !query output +org.apache.spark.sql.catalyst.parser.ParseException +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 70) + +== SQL == +CREATE TEMPORARY VIEW t AS select '2011-05-06 07:08:09.1234567' as c, interval 10 year 20 month 30 day 40 hour 50 minute 6.7890 second as i +----------------------------------------------------------------------^^^ -- !query select extract(year from c), extract(year from i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select extract(y from c), extract(y from i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select extract(years from c), extract(years from i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 57 -- !query select extract(yr from c), extract(yr from i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 51 -- !query select extract(yrs from c), extract(yrs from i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select extract(yearofweek from c) from t -- !query schema -struct +struct<> -- !query output -2011 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 39 -- !query select extract(quarter from c) from t -- !query schema -struct +struct<> -- !query output -2 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 36 -- !query select extract(qtr from c) from t -- !query schema -struct +struct<> -- !query output -2 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 32 -- !query select extract(month from c), extract(month from i) from t -- !query schema -struct +struct<> -- !query output -5 8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 57 -- !query select extract(mon from c), extract(mon from i) from t -- !query schema -struct +struct<> -- !query output -5 8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select extract(mons from c), extract(mons from i) from t -- !query schema -struct +struct<> -- !query output -5 8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select extract(months from c), extract(months from i) from t -- !query schema -struct +struct<> -- !query output -5 8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 59 -- !query select extract(week from c) from t -- !query schema -struct +struct<> -- !query output -18 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 33 -- !query select extract(w from c) from t -- !query schema -struct +struct<> -- 
!query output -18 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 30 -- !query select extract(weeks from c) from t -- !query schema -struct +struct<> -- !query output -18 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 34 -- !query select extract(day from c), extract(day from i) from t -- !query schema -struct +struct<> -- !query output -6 31 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select extract(d from c), extract(d from i) from t -- !query schema -struct +struct<> -- !query output -6 31 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select extract(days from c), extract(days from i) from t -- !query schema -struct +struct<> -- !query output -6 31 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select extract(dayofweek from c) from t -- !query schema -struct +struct<> -- !query output -6 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 38 -- !query select extract(dow from c) from t -- !query schema -struct +struct<> -- !query output -6 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 32 -- !query select extract(dayofweek_iso from c) from t -- !query schema -struct +struct<> -- !query output -5 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 42 -- !query select extract(dow_iso from c) from t -- !query schema -struct +struct<> -- !query output -5 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 36 -- !query select extract(doy from c) from t -- !query schema -struct +struct<> -- !query output -126 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 32 -- !query select extract(hour from c), extract(hour from i) from t -- !query schema -struct +struct<> -- !query output -7 16 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select extract(h from c), extract(h from i) from t -- !query schema -struct +struct<> -- !query output -7 16 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select extract(hours from c), extract(hours from i) from t -- !query schema -struct +struct<> -- !query output -7 16 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 57 -- !query select extract(hr from c), extract(hr from i) from t -- !query schema -struct +struct<> -- !query output -7 16 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 51 -- !query select extract(hrs from c), extract(hrs from i) from t -- !query schema -struct +struct<> -- !query output -7 16 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select extract(minute from c), extract(minute from i) from t -- !query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 59 -- !query select extract(m from c), extract(m from i) from t -- !query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select extract(min from c), extract(min from i) from t -- !query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select extract(mins from c), extract(mins from i) from t -- 
!query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select extract(minutes from c), extract(minutes from i) from t -- !query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 61 -- !query select extract(second from c), extract(second from i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 59 -- !query select extract(s from c), extract(s from i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select extract(sec from c), extract(sec from i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select extract(seconds from c), extract(seconds from i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 61 -- !query select extract(secs from c), extract(secs from i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query @@ -320,7 +364,7 @@ select extract(not_supported from c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7 +Table or view not found: t; line 1 pos 42 -- !query @@ -329,311 +373,349 @@ select extract(not_supported from i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'not_supported' are currently not supported for the interval type.; line 1 pos 7 +Table or view not found: t; line 1 pos 42 -- !query select date_part('year', c), date_part('year', i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select date_part('y', c), date_part('y', i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select date_part('years', c), date_part('years', i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 57 -- !query select date_part('yr', c), date_part('yr', i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 51 -- !query select date_part('yrs', c), date_part('yrs', i) from t -- !query schema -struct +struct<> -- !query output -2011 11 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select date_part('yearofweek', c) from t -- !query schema -struct +struct<> -- !query output -2011 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 39 -- !query select date_part('quarter', c) from t -- !query schema -struct +struct<> -- !query output -2 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 36 -- !query select date_part('qtr', c) from t -- !query schema -struct +struct<> -- !query output -2 
+org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 32 -- !query select date_part('month', c), date_part('month', i) from t -- !query schema -struct +struct<> -- !query output -5 8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 57 -- !query select date_part('mon', c), date_part('mon', i) from t -- !query schema -struct +struct<> -- !query output -5 8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select date_part('mons', c), date_part('mons', i) from t -- !query schema -struct +struct<> -- !query output -5 8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select date_part('months', c), date_part('months', i) from t -- !query schema -struct +struct<> -- !query output -5 8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 59 -- !query select date_part('week', c) from t -- !query schema -struct +struct<> -- !query output -18 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 33 -- !query select date_part('w', c) from t -- !query schema -struct +struct<> -- !query output -18 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 30 -- !query select date_part('weeks', c) from t -- !query schema -struct +struct<> -- !query output -18 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 34 -- !query select date_part('day', c), date_part('day', i) from t -- !query schema -struct +struct<> -- !query output -6 31 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select date_part('d', c), date_part('d', i) from t -- !query schema -struct +struct<> -- !query output -6 31 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select date_part('days', c), date_part('days', i) from t -- !query schema -struct +struct<> -- !query output -6 31 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select date_part('dayofweek', c) from t -- !query schema -struct +struct<> -- !query output -6 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 38 -- !query select date_part('dow', c) from t -- !query schema -struct +struct<> -- !query output -6 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 32 -- !query select date_part('dayofweek_iso', c) from t -- !query schema -struct +struct<> -- !query output -5 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 42 -- !query select date_part('dow_iso', c) from t -- !query schema -struct +struct<> -- !query output -5 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 36 -- !query select date_part('doy', c) from t -- !query schema -struct +struct<> -- !query output -126 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 32 -- !query select date_part('hour', c), date_part('hour', i) from t -- !query schema -struct +struct<> -- !query output -7 16 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select date_part('h', c), date_part('h', i) from t -- !query schema -struct +struct<> -- !query output -7 16 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select date_part('hours', c), date_part('hours', i) from t -- !query schema -struct +struct<> -- !query output -7 16 
+org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 57 -- !query select date_part('hr', c), date_part('hr', i) from t -- !query schema -struct +struct<> -- !query output -7 16 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 51 -- !query select date_part('hrs', c), date_part('hrs', i) from t -- !query schema -struct +struct<> -- !query output -7 16 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select date_part('minute', c), date_part('minute', i) from t -- !query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 59 -- !query select date_part('m', c), date_part('m', i) from t -- !query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select date_part('min', c), date_part('min', i) from t -- !query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select date_part('mins', c), date_part('mins', i) from t -- !query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query select date_part('minutes', c), date_part('minutes', i) from t -- !query schema -struct +struct<> -- !query output -8 50 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 61 -- !query select date_part('second', c), date_part('second', i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 59 -- !query select date_part('s', c), date_part('s', i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 49 -- !query select date_part('sec', c), date_part('sec', i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 53 -- !query select date_part('seconds', c), date_part('seconds', i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 61 -- !query select date_part('secs', c), date_part('secs', i) from t -- !query schema -struct +struct<> -- !query output -9.123456 6.789000 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 55 -- !query @@ -642,7 +724,7 @@ select date_part('not_supported', c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7 +Table or view not found: t; line 1 pos 42 -- !query @@ -651,15 +733,16 @@ select date_part(c, c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -The field parameter needs to be a foldable string value.; line 1 pos 7 +Table or view not found: t; line 1 pos 28 -- !query select date_part(null, c) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 31 -- !query @@ -668,140 +751,157 @@ select date_part(i, i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -The field parameter needs to be a foldable string value.; line 1 pos 7 +Table 
or view not found: t; line 1 pos 28 -- !query select date_part(null, i) from t -- !query schema -struct +struct<> -- !query output -NULL +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 31 -- !query select extract('year', c) from t -- !query schema -struct +struct<> -- !query output -2011 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 31 -- !query select extract('quarter', c) from t -- !query schema -struct +struct<> -- !query output -2 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 34 -- !query select extract('month', c) from t -- !query schema -struct +struct<> -- !query output -5 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 32 -- !query select extract('week', c) from t -- !query schema -struct +struct<> -- !query output -18 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 31 -- !query select extract('day', c) from t -- !query schema -struct +struct<> -- !query output -6 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 30 -- !query select extract('days', c) from t -- !query schema -struct +struct<> -- !query output -6 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 31 -- !query select extract('dayofweek', c) from t -- !query schema -struct +struct<> -- !query output -6 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 36 -- !query select extract('dow', c) from t -- !query schema -struct +struct<> -- !query output -6 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 30 -- !query select extract('doy', c) from t -- !query schema -struct +struct<> -- !query output -126 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 30 -- !query select extract('hour', c) from t -- !query schema -struct +struct<> -- !query output -7 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 31 -- !query select extract('minute', c) from t -- !query schema -struct +struct<> -- !query output -8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 33 -- !query select extract('second', c) from t -- !query schema -struct +struct<> -- !query output -9.123456 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 33 -- !query select c - i from t -- !query schema -struct +struct<> -- !query output -1999-08-05 14:18:02.334456 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 18 -- !query select year(c - i) from t -- !query schema -struct +struct<> -- !query output -1999 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 24 -- !query select extract(year from c - i) from t -- !query schema -struct +struct<> -- !query output -1999 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 37 -- !query select extract(month from to_timestamp(c) - i) from t -- !query schema -struct +struct<> -- !query output -8 +org.apache.spark.sql.AnalysisException +Table or view not found: t; line 1 pos 52 diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out index 9cad1e6887a09..05f2b491df86f 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out @@ -16,7 +16,7 @@ select cast(1 as tinyint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS TINYINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 -- !query @@ -25,7 +25,7 @@ select cast(1 as smallint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS SMALLINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 -- !query @@ -34,7 +34,7 @@ select cast(1 as int) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS INT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 +cannot resolve 'CAST(1 AS INT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 -- !query @@ -43,7 +43,7 @@ select cast(1 as bigint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS BIGINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 -- !query @@ -52,7 +52,7 @@ select cast(1 as float) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 +cannot resolve 'CAST(1 AS FLOAT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 -- !query @@ -61,7 +61,7 @@ select cast(1 as double) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 +cannot resolve 'CAST(1 AS DOUBLE) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -70,13 +70,13 @@ select cast(1 as decimal(10, 0)) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 +cannot resolve 'CAST(1 AS DECIMAL(10,0)) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) 
type.; line 1 pos 7 -- !query select cast('2017-12-11' as string) + interval 2 day -- !query schema -struct +struct -- !query output 2017-12-13 00:00:00 @@ -84,7 +84,7 @@ struct -- !query select cast('2017-12-11 09:30:00' as string) + interval 2 day -- !query schema -struct +struct -- !query output 2017-12-13 09:30:00 @@ -95,7 +95,7 @@ select cast('1' as binary) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7 +cannot resolve 'CAST('1' AS BINARY) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7 -- !query @@ -104,13 +104,13 @@ select cast(1 as boolean) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7 +cannot resolve 'CAST(1 AS BOOLEAN) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7 -- !query select cast('2017-12-11 09:30:00.0' as timestamp) + interval 2 day -- !query schema -struct +struct -- !query output 2017-12-13 09:30:00 @@ -118,9 +118,9 @@ struct -- !query select cast('2017-12-11 09:30:00' as date) + interval 2 day -- !query schema -struct +struct -- !query output -2017-12-13 +2017-12-13 00:00:00 -- !query @@ -129,7 +129,7 @@ select interval 2 day + cast(1 as tinyint) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS TINYINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 -- !query @@ -138,7 +138,7 @@ select interval 2 day + cast(1 as smallint) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS SMALLINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 -- !query @@ -147,7 +147,7 @@ select interval 2 day + cast(1 as int) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS INT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 +cannot resolve 'CAST(1 AS INT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 -- !query @@ -156,7 +156,7 @@ select interval 2 day + cast(1 as bigint) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS BIGINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp 
type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 -- !query @@ -165,7 +165,7 @@ select interval 2 day + cast(1 as float) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 +cannot resolve 'CAST(1 AS FLOAT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 -- !query @@ -174,7 +174,7 @@ select interval 2 day + cast(1 as double) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 +cannot resolve 'CAST(1 AS DOUBLE) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -183,13 +183,13 @@ select interval 2 day + cast(1 as decimal(10, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 +cannot resolve 'CAST(1 AS DECIMAL(10,0)) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query select interval 2 day + cast('2017-12-11' as string) -- !query schema -struct +struct -- !query output 2017-12-13 00:00:00 @@ -197,7 +197,7 @@ struct -- !query select interval 2 day + cast('2017-12-11 09:30:00' as string) -- !query schema -struct +struct -- !query output 2017-12-13 09:30:00 @@ -208,7 +208,7 @@ select interval 2 day + cast('1' as binary) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7 +cannot resolve 'CAST('1' AS BINARY) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7 -- !query @@ -217,13 +217,13 @@ select interval 2 day + cast(1 as boolean) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7 +cannot resolve 'CAST(1 AS BOOLEAN) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7 -- !query select interval 2 day + cast('2017-12-11 09:30:00.0' as timestamp) -- !query schema -struct +struct -- !query output 2017-12-13 09:30:00 @@ -231,9 +231,9 @@ struct -- !query select interval 2 day + cast('2017-12-11 09:30:00' as date) -- !query schema -struct +struct -- !query output -2017-12-13 +2017-12-13 00:00:00 -- !query @@ -242,7 +242,7 @@ select cast(1 as tinyint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS TINYINT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 +cannot 
resolve 'CAST(1 AS TINYINT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 -- !query @@ -251,7 +251,7 @@ select cast(1 as smallint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS SMALLINT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS SMALLINT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 -- !query @@ -260,7 +260,7 @@ select cast(1 as int) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS INT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 +cannot resolve 'CAST(1 AS INT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 -- !query @@ -269,7 +269,7 @@ select cast(1 as bigint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS BIGINT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS BIGINT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 -- !query @@ -278,7 +278,7 @@ select cast(1 as float) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS FLOAT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 +cannot resolve 'CAST(1 AS FLOAT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 -- !query @@ -287,7 +287,7 @@ select cast(1 as double) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS DOUBLE) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 +cannot resolve 'CAST(1 AS DOUBLE) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -296,13 +296,13 @@ select cast(1 as decimal(10, 0)) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 +cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query select cast('2017-12-11' as string) - interval 2 day -- !query schema -struct +struct -- !query output 2017-12-09 00:00:00 @@ -310,7 +310,7 @@ struct -- !query select cast('2017-12-11 09:30:00' as string) - interval 2 day -- !query schema -struct +struct -- !query output 2017-12-09 09:30:00 @@ -321,7 +321,7 @@ select 
cast('1' as binary) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST('1' AS BINARY) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7 +cannot resolve 'CAST('1' AS BINARY) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7 -- !query @@ -330,13 +330,13 @@ select cast(1 as boolean) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS BOOLEAN) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7 +cannot resolve 'CAST(1 AS BOOLEAN) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7 -- !query select cast('2017-12-11 09:30:00.0' as timestamp) - interval 2 day -- !query schema -struct +struct -- !query output 2017-12-09 09:30:00 @@ -344,6 +344,6 @@ struct -- !query select cast('2017-12-11 09:30:00' as date) - interval 2 day -- !query schema -struct +struct -- !query output -2017-12-09 +2017-12-09 00:00:00 diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out index d4c857d969163..911e22483b40c 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out @@ -154,17 +154,10 @@ SELECT val_timestamp, udf(cate), avg(val_timestamp) OVER(PARTITION BY udf(cate) RANGE BETWEEN CURRENT ROW AND interval 23 days 4 hours FOLLOWING) FROM testData ORDER BY udf(cate), val_timestamp -- !query schema -struct +struct<> -- !query output -NULL NULL NULL -2017-07-31 17:00:00 NULL 1.5015456E9 -2017-07-31 17:00:00 a 1.5016970666666667E9 -2017-07-31 17:00:00 a 1.5016970666666667E9 -2017-08-05 23:13:20 a 1.502E9 -2020-12-30 16:00:00 a 1.6093728E9 -2017-07-31 17:00:00 b 1.5022728E9 -2017-08-17 13:00:00 b 1.503E9 -2020-12-30 16:00:00 b 1.6093728E9 +org.apache.spark.sql.AnalysisException +cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY testdata.val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 2001600000000 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'day-time interval' which is used in the range frame.; line 1 pos 56 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/window.sql.out b/sql/core/src/test/resources/sql-tests/results/window.sql.out index c377658722641..7937c77006f61 100644 --- a/sql/core/src/test/resources/sql-tests/results/window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/window.sql.out @@ -198,17 +198,10 @@ SELECT val_timestamp, cate, avg(val_timestamp) OVER(PARTITION BY cate ORDER BY v RANGE BETWEEN CURRENT ROW AND interval 23 days 4 hours FOLLOWING) FROM testData ORDER BY cate, val_timestamp -- !query schema -struct +struct<> -- !query output -NULL NULL NULL -2017-07-31 17:00:00 NULL 1.5015456E9 -2017-07-31 17:00:00 a 1.5016970666666667E9 -2017-07-31 17:00:00 a 1.5016970666666667E9 -2017-08-05 23:13:20 a 1.502E9 -2020-12-30 16:00:00 a 1.6093728E9 -2017-07-31 17:00:00 b 1.5022728E9 -2017-08-17 13:00:00 b 1.503E9 -2020-12-30 16:00:00 b 1.6093728E9 
+org.apache.spark.sql.AnalysisException +cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 2001600000000 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'day-time interval' which is used in the range frame.; line 1 pos 51 -- !query From aa7303bd9c2f7ca03dea6baa2ff00bc9559d5123 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 09:49:48 +0300 Subject: [PATCH 04/20] Fix SparkMetadataOperationSuite --- .../sql/hive/thriftserver/SparkMetadataOperationSuite.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkMetadataOperationSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkMetadataOperationSuite.scala index 897ea00975a05..bd4becd51e3e2 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkMetadataOperationSuite.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkMetadataOperationSuite.scala @@ -24,6 +24,7 @@ import org.apache.hive.service.cli.HiveSQLException import org.apache.spark.SPARK_VERSION import org.apache.spark.sql.catalyst.analysis.FunctionRegistry +import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ import org.apache.spark.util.VersionUtils @@ -355,6 +356,8 @@ class SparkMetadataOperationSuite extends HiveThriftServer2TestBase { val ddl = s"CREATE GLOBAL TEMP VIEW $viewName as select interval 1 day as i" withJdbcStatement(viewName) { statement => + val legacyIntervalEnabled = SQLConf.get.legacyIntervalEnabled + statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key}=true") statement.execute(ddl) val data = statement.getConnection.getMetaData val rowSet = data.getColumns("", "global_temp", viewName, null) @@ -374,6 +377,7 @@ class SparkMetadataOperationSuite extends HiveThriftServer2TestBase { assert(rowSet.getString("IS_NULLABLE") === "YES") assert(rowSet.getString("IS_AUTO_INCREMENT") === "NO") } + statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key}=$legacyIntervalEnabled") } } From f5e692a77f34766847820aebd3f89ce8167064a3 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 10:01:36 +0300 Subject: [PATCH 05/20] Fix HiveThriftBinaryServerSuite --- .../sql/hive/thriftserver/HiveThriftServer2Suites.scala | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala index b17a181a7f983..8ede94ad355ca 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala @@ -664,9 +664,12 @@ class HiveThriftBinaryServerSuite extends HiveThriftServer2Test { test("Support interval type") { withJdbcStatement() { statement => + val legacyIntervalEnabled = SQLConf.get.legacyIntervalEnabled + statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key}=true") val rs = statement.executeQuery("SELECT interval 3 months 1 hours") assert(rs.next()) assert(rs.getString(1) === "3 months 1 hours") + statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key}=$legacyIntervalEnabled") } // Invalid interval 
value withJdbcStatement() { statement => @@ -683,6 +686,8 @@ class HiveThriftBinaryServerSuite extends HiveThriftServer2Test { val ddl1 = s"CREATE GLOBAL TEMP VIEW $viewName1 AS SELECT INTERVAL 1 DAY AS i" val ddl2 = s"CREATE TEMP VIEW $viewName2 as select * from global_temp.$viewName1" withJdbcStatement(viewName1, viewName2) { statement => + val legacyIntervalEnabled = SQLConf.get.legacyIntervalEnabled + statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key}=true") statement.executeQuery(ddl1) statement.executeQuery(ddl2) val rs = statement.executeQuery(s"SELECT v1.i as a, v2.i as b FROM global_temp.$viewName1" + @@ -691,6 +696,7 @@ class HiveThriftBinaryServerSuite extends HiveThriftServer2Test { assert(rs.getString("a") === "1 days") assert(rs.getString("b") === "1 days") } + statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key}=$legacyIntervalEnabled") } } From a408ab91d1a40cf098a73e2c7958d3338de47bef Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 10:49:35 +0300 Subject: [PATCH 06/20] Fix SparkThriftServerProtocolVersionsSuite --- .../SparkThriftServerProtocolVersionsSuite.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala index 363436679a6c3..a8f625146d0e2 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala @@ -362,11 +362,13 @@ class SparkThriftServerProtocolVersionsSuite extends HiveThriftServer2TestBase { } test(s"$version get interval type") { - testExecuteStatementWithProtocolVersion(version, "SELECT interval '1' year '2' day") { rs => + testExecuteStatementWithProtocolVersion( + version, + "SELECT CAST('1 year 2 day' AS INTERVAL) AS icol") { rs => assert(rs.next()) assert(rs.getString(1) === "1 years 2 days") val metaData = rs.getMetaData - assert(metaData.getColumnName(1) === "INTERVAL '1 years 2 days'") + assert(metaData.getColumnName(1) === "icol") assert(metaData.getColumnTypeName(1) === "string") assert(metaData.getColumnType(1) === java.sql.Types.VARCHAR) assert(metaData.getPrecision(1) === Int.MaxValue) From baafc51b69159af5d0bcf4b8227efbc51a25a480 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 15:08:13 +0300 Subject: [PATCH 07/20] Fix SQLQuerySuite --- .../org/apache/spark/sql/SQLQuerySuite.scala | 40 ++++++++++--------- 1 file changed, 22 insertions(+), 18 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala index ae6337cb2a59d..c70eb43d84f49 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala @@ -1550,30 +1550,34 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark test("SPARK-8753: add interval type") { import org.apache.spark.unsafe.types.CalendarInterval - val df = sql("select interval 3 years -3 month 7 week 123 microseconds") - checkAnswer(df, Row(new CalendarInterval(12 * 3 - 3, 7 * 7, 123 ))) - withTempPath(f => { - // Currently we don't yet support saving out values of interval data type. 
- val e = intercept[AnalysisException] { - df.write.json(f.getCanonicalPath) - } - e.message.contains("Cannot save interval data type into external storage") - }) + withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") { + val df = sql("select interval 3 years -3 month 7 week 123 microseconds") + checkAnswer(df, Row(new CalendarInterval(12 * 3 - 3, 7 * 7, 123 ))) + withTempPath(f => { + // Currently we don't yet support saving out values of interval data type. + val e = intercept[AnalysisException] { + df.write.json(f.getCanonicalPath) + } + e.message.contains("Cannot save interval data type into external storage") + }) + } } test("SPARK-8945: add and subtract expressions for interval type") { - val df = sql("select interval 3 years -3 month 7 week 123 microseconds as i") - checkAnswer(df, Row(new CalendarInterval(12 * 3 - 3, 7 * 7, 123))) + withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") { + val df = sql("select interval 3 years -3 month 7 week 123 microseconds as i") + checkAnswer(df, Row(new CalendarInterval(12 * 3 - 3, 7 * 7, 123))) - checkAnswer(df.select(df("i") + new CalendarInterval(2, 1, 123)), - Row(new CalendarInterval(12 * 3 - 3 + 2, 7 * 7 + 1, 123 + 123))) + checkAnswer(df.select(df("i") + new CalendarInterval(2, 1, 123)), + Row(new CalendarInterval(12 * 3 - 3 + 2, 7 * 7 + 1, 123 + 123))) - checkAnswer(df.select(df("i") - new CalendarInterval(2, 1, 123)), - Row(new CalendarInterval(12 * 3 - 3 - 2, 7 * 7 - 1, 123 - 123))) + checkAnswer(df.select(df("i") - new CalendarInterval(2, 1, 123)), + Row(new CalendarInterval(12 * 3 - 3 - 2, 7 * 7 - 1, 123 - 123))) - // unary minus - checkAnswer(df.select(-df("i")), - Row(new CalendarInterval(-(12 * 3 - 3), -7 * 7, -123))) + // unary minus + checkAnswer(df.select(-df("i")), + Row(new CalendarInterval(-(12 * 3 - 3), -7 * 7, -123))) + } } test("aggregation with codegen updates peak execution memory") { From 142b47e333deee706d63fba80afb06bc5ca47547 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 15:17:59 +0300 Subject: [PATCH 08/20] Fix DataFrameFunctionsSuite --- .../spark/sql/DataFrameFunctionsSuite.scala | 63 ++++++++++--------- 1 file changed, 32 insertions(+), 31 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala index 70dc0d09bcad5..0aac24d6d0fce 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala @@ -1027,43 +1027,44 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { checkAnswer(Seq((7, 2, -2)).toDF().select(sequence($"_1", $"_2", $"_3")), Seq(Row(Array(7, 5, 3)))) - checkAnswer( - spark.sql("select sequence(" + - " cast('2018-01-01 00:00:00' as timestamp)" + - ", cast('2018-01-02 00:00:00' as timestamp)" + - ", interval 12 hours)"), - Seq(Row(Array( - Timestamp.valueOf("2018-01-01 00:00:00"), - Timestamp.valueOf("2018-01-01 12:00:00"), - Timestamp.valueOf("2018-01-02 00:00:00"))))) + withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") { + checkAnswer( + spark.sql("select sequence(" + + " cast('2018-01-01 00:00:00' as timestamp)" + + ", cast('2018-01-02 00:00:00' as timestamp)" + + ", interval 12 hours)"), + Seq(Row(Array( + Timestamp.valueOf("2018-01-01 00:00:00"), + Timestamp.valueOf("2018-01-01 12:00:00"), + Timestamp.valueOf("2018-01-02 00:00:00"))))) + + withDefaultTimeZone(UTC) { + checkAnswer( + spark.sql("select sequence(" + 
+ " cast('2018-01-01' as date)" + + ", cast('2018-03-01' as date)" + + ", interval 1 month)"), + Seq(Row(Array( + Date.valueOf("2018-01-01"), + Date.valueOf("2018-02-01"), + Date.valueOf("2018-03-01"))))) + } + + // test type coercion + checkAnswer( + Seq((1.toByte, 3L, 1)).toDF().select(sequence($"_1", $"_2", $"_3")), + Seq(Row(Array(1L, 2L, 3L)))) - withDefaultTimeZone(UTC) { checkAnswer( spark.sql("select sequence(" + " cast('2018-01-01' as date)" + - ", cast('2018-03-01' as date)" + - ", interval 1 month)"), + ", cast('2018-01-02 00:00:00' as timestamp)" + + ", interval 12 hours)"), Seq(Row(Array( - Date.valueOf("2018-01-01"), - Date.valueOf("2018-02-01"), - Date.valueOf("2018-03-01"))))) + Timestamp.valueOf("2018-01-01 00:00:00"), + Timestamp.valueOf("2018-01-01 12:00:00"), + Timestamp.valueOf("2018-01-02 00:00:00"))))) } - - // test type coercion - checkAnswer( - Seq((1.toByte, 3L, 1)).toDF().select(sequence($"_1", $"_2", $"_3")), - Seq(Row(Array(1L, 2L, 3L)))) - - checkAnswer( - spark.sql("select sequence(" + - " cast('2018-01-01' as date)" + - ", cast('2018-01-02 00:00:00' as timestamp)" + - ", interval 12 hours)"), - Seq(Row(Array( - Timestamp.valueOf("2018-01-01 00:00:00"), - Timestamp.valueOf("2018-01-01 12:00:00"), - Timestamp.valueOf("2018-01-02 00:00:00"))))) - // test invalid data types intercept[AnalysisException] { Seq((true, false)).toDF().selectExpr("sequence(_1, _2)") From ae83c658c8ce0cd591bb0f1b23c60b2e40dcb3e1 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 15:23:01 +0300 Subject: [PATCH 09/20] Fix DataSourceV2DataFrameSuite --- .../DataSourceV2DataFrameSuite.scala | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSuite.scala index efb87dafe0ff8..4f1361929298c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSuite.scala @@ -25,6 +25,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{AppendData, LogicalPlan} import org.apache.spark.sql.connector.catalog.Identifier import org.apache.spark.sql.execution.QueryExecution import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation +import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types.StructType import org.apache.spark.sql.util.QueryExecutionListener @@ -171,20 +172,22 @@ class DataSourceV2DataFrameSuite } test("Cannot write data with intervals to v2") { - withTable("testcat.table_name") { - val testCatalog = spark.sessionState.catalogManager.catalog("testcat").asTableCatalog - testCatalog.createTable( - Identifier.of(Array(), "table_name"), - new StructType().add("i", "interval"), - Array.empty, Collections.emptyMap[String, String]) - val df = sql("select interval 1 day as i") - val v2Writer = df.writeTo("testcat.table_name") - val e1 = intercept[AnalysisException](v2Writer.append()) - assert(e1.getMessage.contains(s"Cannot use interval type in the table schema.")) - val e2 = intercept[AnalysisException](v2Writer.overwrite(df("i"))) - assert(e2.getMessage.contains(s"Cannot use interval type in the table schema.")) - val e3 = intercept[AnalysisException](v2Writer.overwritePartitions()) - assert(e3.getMessage.contains(s"Cannot use interval type in the table schema.")) + withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") { + 
withTable("testcat.table_name") { + val testCatalog = spark.sessionState.catalogManager.catalog("testcat").asTableCatalog + testCatalog.createTable( + Identifier.of(Array(), "table_name"), + new StructType().add("i", "interval"), + Array.empty, Collections.emptyMap[String, String]) + val df = sql("select interval 1 day as i") + val v2Writer = df.writeTo("testcat.table_name") + val e1 = intercept[AnalysisException](v2Writer.append()) + assert(e1.getMessage.contains(s"Cannot use interval type in the table schema.")) + val e2 = intercept[AnalysisException](v2Writer.overwrite(df("i"))) + assert(e2.getMessage.contains(s"Cannot use interval type in the table schema.")) + val e3 = intercept[AnalysisException](v2Writer.overwritePartitions()) + assert(e3.getMessage.contains(s"Cannot use interval type in the table schema.")) + } } } From 7df057cfccf01b557fbc63c219187a7785fbd03a Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 15:58:51 +0300 Subject: [PATCH 10/20] Fix ExpressionInfoSuite --- .../catalyst/expressions/collectionOperations.scala | 6 ++++++ .../catalyst/expressions/datetimeExpressions.scala | 12 ++++++++++++ 2 files changed, 18 insertions(+) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala index 125e796a98c2d..5f1f041e4e4cf 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala @@ -2474,6 +2474,7 @@ case class Flatten(child: Expression) extends UnaryExpression with NullIntoleran copy(child = newChild) } +// scalastyle:off line.size.limit line.contains.tab @ExpressionDescription( usage = """ _FUNC_(start, stop, step) - Generates an array of elements from start to stop (inclusive), @@ -2502,12 +2503,17 @@ case class Flatten(child: Expression) extends UnaryExpression with NullIntoleran [1,2,3,4,5] > SELECT _FUNC_(5, 1); [5,4,3,2,1] + > SET spark.sql.legacy.interval.enabled=true; + spark.sql.legacy.interval.enabled true > SELECT _FUNC_(to_date('2018-01-01'), to_date('2018-03-01'), interval 1 month); [2018-01-01,2018-02-01,2018-03-01] + > SET spark.sql.legacy.interval.enabled=false; + spark.sql.legacy.interval.enabled false """, group = "array_funcs", since = "2.4.0" ) +// scalastyle:on line.size.limit line.contains.tab case class Sequence( start: Expression, stop: Expression, diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala index ba9d458c0ae5a..6fb685e86a5d6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala @@ -2392,6 +2392,7 @@ object DatePart { } } +// scalastyle:off line.size.limit line.contains.tab // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(field, source) - Extracts a part of the date/timestamp or interval source.", @@ -2410,10 +2411,14 @@ object DatePart { 224 > SELECT _FUNC_('SECONDS', timestamp'2019-10-01 00:00:01.000001'); 1.000001 + > SET spark.sql.legacy.interval.enabled=true; + spark.sql.legacy.interval.enabled true > SELECT _FUNC_('days', interval 1 year 10 months 5 days); 5 > SELECT _FUNC_('seconds', interval 
5 hours 30 seconds 1 milliseconds 1 microseconds); 30.001001 + > SET spark.sql.legacy.interval.enabled=false; + spark.sql.legacy.interval.enabled false """, note = """ The _FUNC_ function is equivalent to the SQL-standard function `EXTRACT(field FROM source)` @@ -2421,6 +2426,7 @@ object DatePart { group = "datetime_funcs", since = "3.0.0") // scalastyle:on line.size.limit +// scalastyle:on line.size.limit line.contains.tab case class DatePart(field: Expression, source: Expression, child: Expression) extends RuntimeReplaceable { @@ -2437,6 +2443,7 @@ case class DatePart(field: Expression, source: Expression, child: Expression) copy(child = newChild) } +// scalastyle:off line.size.limit line.contains.tab // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(field FROM source) - Extracts a part of the date/timestamp or interval source.", @@ -2475,10 +2482,14 @@ case class DatePart(field: Expression, source: Expression, child: Expression) 224 > SELECT _FUNC_(SECONDS FROM timestamp'2019-10-01 00:00:01.000001'); 1.000001 + > SET spark.sql.legacy.interval.enabled=true; + spark.sql.legacy.interval.enabled true > SELECT _FUNC_(days FROM interval 1 year 10 months 5 days); 5 > SELECT _FUNC_(seconds FROM interval 5 hours 30 seconds 1 milliseconds 1 microseconds); 30.001001 + > SET spark.sql.legacy.interval.enabled=false; + spark.sql.legacy.interval.enabled false """, note = """ The _FUNC_ function is equivalent to `date_part(field, source)`. @@ -2486,6 +2497,7 @@ case class DatePart(field: Expression, source: Expression, child: Expression) group = "datetime_funcs", since = "3.0.0") // scalastyle:on line.size.limit +// scalastyle:on line.size.limit line.contains.tab case class Extract(field: Expression, source: Expression, child: Expression) extends RuntimeReplaceable { From 39d33c90d0ea9cfb4ed8c27fccaf0480bb3f3f45 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 16:04:52 +0300 Subject: [PATCH 11/20] Re-gen results for PlanStabilitySuite --- .../tpcds-plan-stability/approved-plans-v1_4/q12/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q37/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q40/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt | 2 +- .../tpcds-plan-stability/approved-plans-v1_4/q77/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q82/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q92/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v2_7/q12/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v2_7/q20/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt | 2 +- .../tpcds-plan-stability/approved-plans-v2_7/q77a/explain.txt | 4 ++-- .../tpcds-plan-stability/approved-plans-v2_7/q80a/explain.txt | 4 ++-- 
.../tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt | 4 ++-- 23 files changed, 44 insertions(+), 44 deletions(-) diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12/explain.txt index 0c22f6333e5a9..c2135174bf9d7 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12/explain.txt @@ -71,7 +71,7 @@ Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#5, Output [2]: [d_date_sk#12, d_date#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (11) ColumnarToRow [codegen id : 2] @@ -79,7 +79,7 @@ Input [2]: [d_date_sk#12, d_date#13] (12) Filter [codegen id : 2] Input [2]: [d_date_sk#12, d_date#13] -Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (d_date#13 <= 1999-03-24)) AND isnotnull(d_date_sk#12)) +Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (cast(d_date#13 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#12)) (13) Project [codegen id : 2] Output [1]: [d_date_sk#12] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt index 683d83235cce5..3a0a89d889e2e 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt @@ -133,7 +133,7 @@ Join condition: None Output [2]: [d_date_sk#17, d_date#18] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2002-02-01), LessThanOrEqual(d_date,2002-04-02), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2002-02-01), IsNotNull(d_date_sk)] ReadSchema: struct (21) ColumnarToRow [codegen id : 8] @@ -141,7 +141,7 @@ Input [2]: [d_date_sk#17, d_date#18] (22) Filter [codegen id : 8] Input [2]: [d_date_sk#17, d_date#18] -Condition : (((isnotnull(d_date#18) AND (d_date#18 >= 2002-02-01)) AND (d_date#18 <= 2002-04-02)) AND isnotnull(d_date_sk#17)) +Condition : (((isnotnull(d_date#18) AND (d_date#18 >= 2002-02-01)) AND (cast(d_date#18 as timestamp) <= 2002-04-02 00:00:00)) AND isnotnull(d_date_sk#17)) (23) Project [codegen id : 8] Output [1]: [d_date_sk#17] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt index 4f5eebc0c300c..6b3ec9d6da143 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt @@ -71,7 +71,7 @@ Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_sk#5, Output [2]: [d_date_sk#12, d_date#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), 
IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (11) ColumnarToRow [codegen id : 2] @@ -79,7 +79,7 @@ Input [2]: [d_date_sk#12, d_date#13] (12) Filter [codegen id : 2] Input [2]: [d_date_sk#12, d_date#13] -Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (d_date#13 <= 1999-03-24)) AND isnotnull(d_date_sk#12)) +Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (cast(d_date#13 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#12)) (13) Project [codegen id : 2] Output [1]: [d_date_sk#12] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt index 7fcbe1befa6b4..b278a072404e3 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt @@ -105,7 +105,7 @@ Input [6]: [inv_item_sk#1, inv_quantity_on_hand#3, inv_date_sk#4, w_warehouse_na Output [2]: [d_date_sk#13, d_date#14] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-02-10), LessThanOrEqual(d_date,2000-04-10), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), IsNotNull(d_date_sk)] ReadSchema: struct (18) ColumnarToRow [codegen id : 3] @@ -113,7 +113,7 @@ Input [2]: [d_date_sk#13, d_date#14] (19) Filter [codegen id : 3] Input [2]: [d_date_sk#13, d_date#14] -Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 2000-02-10)) AND (d_date#14 <= 2000-04-10)) AND isnotnull(d_date_sk#13)) +Condition : (((isnotnull(d_date#14) AND (cast(d_date#14 as timestamp) >= 2000-02-10 00:00:00)) AND (cast(d_date#14 as timestamp) <= 2000-04-10 00:00:00)) AND isnotnull(d_date_sk#13)) (20) BroadcastExchange Input [2]: [d_date_sk#13, d_date#14] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt index ad918310a918a..f8029766bdb88 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt @@ -97,7 +97,7 @@ Condition : isnotnull(cs_item_sk#8) Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-01-27), LessThanOrEqual(d_date,2000-04-26), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-01-27), IsNotNull(d_date_sk)] ReadSchema: struct (15) ColumnarToRow [codegen id : 2] @@ -105,7 +105,7 @@ Input [2]: [d_date_sk#11, d_date#12] (16) Filter [codegen id : 2] Input [2]: [d_date_sk#11, d_date#12] -Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-01-27)) AND (d_date#12 <= 2000-04-26)) AND isnotnull(d_date_sk#11)) +Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-01-27)) AND (cast(d_date#12 as timestamp) <= 2000-04-26 00:00:00)) AND isnotnull(d_date_sk#11)) (17) Project [codegen id : 2] Output [1]: [d_date_sk#11] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37/explain.txt index 9cc89345f5a1c..08bdb71f9db49 
100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37/explain.txt @@ -84,7 +84,7 @@ Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-02-01), LessThanOrEqual(d_date,2000-04-01), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-02-01), IsNotNull(d_date_sk)] ReadSchema: struct (13) ColumnarToRow [codegen id : 2] @@ -92,7 +92,7 @@ Input [2]: [d_date_sk#11, d_date#12] (14) Filter [codegen id : 2] Input [2]: [d_date_sk#11, d_date#12] -Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-02-01)) AND (d_date#12 <= 2000-04-01)) AND isnotnull(d_date_sk#11)) +Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-02-01)) AND (cast(d_date#12 as timestamp) <= 2000-04-01 00:00:00)) AND isnotnull(d_date_sk#11)) (15) Project [codegen id : 2] Output [1]: [d_date_sk#11] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40/explain.txt index 05a6bb2928ec6..384596bf33ef1 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40/explain.txt @@ -157,7 +157,7 @@ Input [7]: [cs_item_sk#2, cs_sales_price#4, cs_sold_date_sk#5, cr_refunded_cash# Output [2]: [d_date_sk#20, d_date#21] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-02-10), LessThanOrEqual(d_date,2000-04-10), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), IsNotNull(d_date_sk)] ReadSchema: struct (28) ColumnarToRow [codegen id : 7] @@ -165,7 +165,7 @@ Input [2]: [d_date_sk#20, d_date#21] (29) Filter [codegen id : 7] Input [2]: [d_date_sk#20, d_date#21] -Condition : (((isnotnull(d_date#21) AND (d_date#21 >= 2000-02-10)) AND (d_date#21 <= 2000-04-10)) AND isnotnull(d_date_sk#20)) +Condition : (((isnotnull(d_date#21) AND (cast(d_date#21 as timestamp) >= 2000-02-10 00:00:00)) AND (cast(d_date#21 as timestamp) <= 2000-04-10 00:00:00)) AND isnotnull(d_date_sk#20)) (30) BroadcastExchange Input [2]: [d_date_sk#20, d_date#21] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt index 7c0804a51c860..b815f4ed5a220 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt @@ -123,7 +123,7 @@ Input [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_s Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-23), LessThanOrEqual(d_date,2000-09-06), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-23), IsNotNull(d_date_sk)] ReadSchema: struct (11) ColumnarToRow [codegen id : 3] @@ -131,7 +131,7 @@ Input [2]: [d_date_sk#22, d_date#23] (12) Filter [codegen id : 3] Input [2]: [d_date_sk#22, d_date#23] 
-Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 2000-08-23)) AND (d_date#23 <= 2000-09-06)) AND isnotnull(d_date_sk#22)) +Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 2000-08-23)) AND (cast(d_date#23 as timestamp) <= 2000-09-06 00:00:00)) AND isnotnull(d_date_sk#22)) (13) Project [codegen id : 3] Output [1]: [d_date_sk#22] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt index 177b8e681608a..2cdd9d715749c 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt @@ -313,7 +313,7 @@ Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, false] as bigint) (53) BroadcastHashJoin [codegen id : 10] Left keys [1]: [cs_ship_date_sk#1] Right keys [1]: [d_date_sk#35] -Join condition: (d_date#36 > d_date#28 + 5 days) +Join condition: (cast(d_date#36 as timestamp) > cast(d_date#28 as timestamp) + 432000000000) (54) Project [codegen id : 10] Output [6]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#16, i_item_desc#19, d_week_seq#29] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77/explain.txt index 7c9c3589c275e..99c4770a1c823 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77/explain.txt @@ -109,7 +109,7 @@ Condition : isnotnull(ss_store_sk#1) Output [2]: [d_date_sk#6, d_date#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-03), LessThanOrEqual(d_date,2000-09-02), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-03), IsNotNull(d_date_sk)] ReadSchema: struct (5) ColumnarToRow [codegen id : 1] @@ -117,7 +117,7 @@ Input [2]: [d_date_sk#6, d_date#7] (6) Filter [codegen id : 1] Input [2]: [d_date_sk#6, d_date#7] -Condition : (((isnotnull(d_date#7) AND (d_date#7 >= 2000-08-03)) AND (d_date#7 <= 2000-09-02)) AND isnotnull(d_date_sk#6)) +Condition : (((isnotnull(d_date#7) AND (d_date#7 >= 2000-08-03)) AND (cast(d_date#7 as timestamp) <= 2000-09-02 00:00:00)) AND isnotnull(d_date_sk#6)) (7) Project [codegen id : 1] Output [1]: [d_date_sk#6] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt index 8a413745875c9..e6b0e14206493 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt @@ -174,7 +174,7 @@ Input [11]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ Output [2]: [d_date_sk#16, d_date#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-23), LessThanOrEqual(d_date,2000-09-22), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-23), IsNotNull(d_date_sk)] ReadSchema: struct (15) ColumnarToRow [codegen id : 5] @@ -182,7 +182,7 @@ Input [2]: [d_date_sk#16, d_date#17] (16) Filter [codegen id : 
5] Input [2]: [d_date_sk#16, d_date#17] -Condition : (((isnotnull(d_date#17) AND (d_date#17 >= 2000-08-23)) AND (d_date#17 <= 2000-09-22)) AND isnotnull(d_date_sk#16)) +Condition : (((isnotnull(d_date#17) AND (d_date#17 >= 2000-08-23)) AND (cast(d_date#17 as timestamp) <= 2000-09-22 00:00:00)) AND isnotnull(d_date_sk#16)) (17) Project [codegen id : 5] Output [1]: [d_date_sk#16] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82/explain.txt index a03333f7623cc..508b4ac3bed9f 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82/explain.txt @@ -84,7 +84,7 @@ Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-05-25), LessThanOrEqual(d_date,2000-07-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-05-25), IsNotNull(d_date_sk)] ReadSchema: struct (13) ColumnarToRow [codegen id : 2] @@ -92,7 +92,7 @@ Input [2]: [d_date_sk#11, d_date#12] (14) Filter [codegen id : 2] Input [2]: [d_date_sk#11, d_date#12] -Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-05-25)) AND (d_date#12 <= 2000-07-24)) AND isnotnull(d_date_sk#11)) +Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-05-25)) AND (cast(d_date#12 as timestamp) <= 2000-07-24 00:00:00)) AND isnotnull(d_date_sk#11)) (15) Project [codegen id : 2] Output [1]: [d_date_sk#11] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92/explain.txt index 46953fe184f28..4976143c64fb6 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92/explain.txt @@ -100,7 +100,7 @@ Condition : isnotnull(ws_item_sk#8) Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-01-27), LessThanOrEqual(d_date,2000-04-26), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-01-27), IsNotNull(d_date_sk)] ReadSchema: struct (15) ColumnarToRow [codegen id : 2] @@ -108,7 +108,7 @@ Input [2]: [d_date_sk#11, d_date#12] (16) Filter [codegen id : 2] Input [2]: [d_date_sk#11, d_date#12] -Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-01-27)) AND (d_date#12 <= 2000-04-26)) AND isnotnull(d_date_sk#11)) +Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-01-27)) AND (cast(d_date#12 as timestamp) <= 2000-04-26 00:00:00)) AND isnotnull(d_date_sk#11)) (17) Project [codegen id : 2] Output [1]: [d_date_sk#11] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt index 607f12f77e4c1..66e0cdf91f16c 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt @@ -133,7 +133,7 @@ Join condition: None Output [2]: 
[d_date_sk#17, d_date#18] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-01), LessThanOrEqual(d_date,1999-04-02), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-01), IsNotNull(d_date_sk)] ReadSchema: struct (21) ColumnarToRow [codegen id : 8] @@ -141,7 +141,7 @@ Input [2]: [d_date_sk#17, d_date#18] (22) Filter [codegen id : 8] Input [2]: [d_date_sk#17, d_date#18] -Condition : (((isnotnull(d_date#18) AND (d_date#18 >= 1999-02-01)) AND (d_date#18 <= 1999-04-02)) AND isnotnull(d_date_sk#17)) +Condition : (((isnotnull(d_date#18) AND (d_date#18 >= 1999-02-01)) AND (cast(d_date#18 as timestamp) <= 1999-04-02 00:00:00)) AND isnotnull(d_date_sk#17)) (23) Project [codegen id : 8] Output [1]: [d_date_sk#17] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt index 687e050b44090..d3bd69c6291e1 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt @@ -200,7 +200,7 @@ Join condition: None Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-01), LessThanOrEqual(d_date,1999-04-02), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-01), IsNotNull(d_date_sk)] ReadSchema: struct (34) ColumnarToRow [codegen id : 16] @@ -208,7 +208,7 @@ Input [2]: [d_date_sk#22, d_date#23] (35) Filter [codegen id : 16] Input [2]: [d_date_sk#22, d_date#23] -Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 1999-02-01)) AND (d_date#23 <= 1999-04-02)) AND isnotnull(d_date_sk#22)) +Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 1999-02-01)) AND (cast(d_date#23 as timestamp) <= 1999-04-02 00:00:00)) AND isnotnull(d_date_sk#22)) (36) Project [codegen id : 16] Output [1]: [d_date_sk#22] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt index 1507b5ccbc0ae..140e3cbc61c74 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt @@ -73,7 +73,7 @@ Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, Output [2]: [d_date_sk#12, d_date#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (11) ColumnarToRow [codegen id : 2] @@ -81,7 +81,7 @@ Input [2]: [d_date_sk#12, d_date#13] (12) Filter [codegen id : 2] Input [2]: [d_date_sk#12, d_date#13] -Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (d_date#13 <= 1999-03-24)) AND isnotnull(d_date_sk#12)) +Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (cast(d_date#13 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#12)) (13) Project [codegen id : 2] Output [1]: [d_date_sk#12] diff --git 
a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12/explain.txt index f6dd412f5c4b1..59dc2d512689e 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12/explain.txt @@ -71,7 +71,7 @@ Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#5, Output [2]: [d_date_sk#12, d_date#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (11) ColumnarToRow [codegen id : 2] @@ -79,7 +79,7 @@ Input [2]: [d_date_sk#12, d_date#13] (12) Filter [codegen id : 2] Input [2]: [d_date_sk#12, d_date#13] -Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (d_date#13 <= 1999-03-24)) AND isnotnull(d_date_sk#12)) +Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (cast(d_date#13 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#12)) (13) Project [codegen id : 2] Output [1]: [d_date_sk#12] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20/explain.txt index c6a345be29c9d..7d8b933a4118d 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20/explain.txt @@ -71,7 +71,7 @@ Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_sk#5, Output [2]: [d_date_sk#12, d_date#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (11) ColumnarToRow [codegen id : 2] @@ -79,7 +79,7 @@ Input [2]: [d_date_sk#12, d_date#13] (12) Filter [codegen id : 2] Input [2]: [d_date_sk#12, d_date#13] -Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (d_date#13 <= 1999-03-24)) AND isnotnull(d_date_sk#12)) +Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (cast(d_date#13 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#12)) (13) Project [codegen id : 2] Output [1]: [d_date_sk#12] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt index 036b3cdcb22fe..25ffbfb4b8dd6 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt @@ -136,7 +136,7 @@ Input [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_s Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), LessThanOrEqual(d_date,1998-08-18), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), 
GreaterThanOrEqual(d_date,1998-08-04), IsNotNull(d_date_sk)] ReadSchema: struct (11) ColumnarToRow [codegen id : 3] @@ -144,7 +144,7 @@ Input [2]: [d_date_sk#22, d_date#23] (12) Filter [codegen id : 3] Input [2]: [d_date_sk#22, d_date#23] -Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 1998-08-04)) AND (d_date#23 <= 1998-08-18)) AND isnotnull(d_date_sk#22)) +Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 1998-08-04)) AND (cast(d_date#23 as timestamp) <= 1998-08-18 00:00:00)) AND isnotnull(d_date_sk#22)) (13) Project [codegen id : 3] Output [1]: [d_date_sk#22] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt index 0e67565e856ad..8811e5b9d2949 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt @@ -313,7 +313,7 @@ Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, false] as bigint) (53) BroadcastHashJoin [codegen id : 10] Left keys [1]: [cs_ship_date_sk#1] Right keys [1]: [d_date_sk#35] -Join condition: (d_date#36 > d_date#28 + 5 days) +Join condition: (cast(d_date#36 as timestamp) > cast(d_date#28 as timestamp) + 432000000000) (54) Project [codegen id : 10] Output [6]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#16, i_item_desc#19, d_week_seq#29] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a/explain.txt index 42c9e941756c3..087b0c043c4f2 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a/explain.txt @@ -122,7 +122,7 @@ Condition : isnotnull(ss_store_sk#1) Output [2]: [d_date_sk#6, d_date#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), LessThanOrEqual(d_date,1998-09-03), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), IsNotNull(d_date_sk)] ReadSchema: struct (5) ColumnarToRow [codegen id : 1] @@ -130,7 +130,7 @@ Input [2]: [d_date_sk#6, d_date#7] (6) Filter [codegen id : 1] Input [2]: [d_date_sk#6, d_date#7] -Condition : (((isnotnull(d_date#7) AND (d_date#7 >= 1998-08-04)) AND (d_date#7 <= 1998-09-03)) AND isnotnull(d_date_sk#6)) +Condition : (((isnotnull(d_date#7) AND (d_date#7 >= 1998-08-04)) AND (cast(d_date#7 as timestamp) <= 1998-09-03 00:00:00)) AND isnotnull(d_date_sk#6)) (7) Project [codegen id : 1] Output [1]: [d_date_sk#6] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a/explain.txt index a9efbf56ef743..dcf46839487fa 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a/explain.txt @@ -187,7 +187,7 @@ Input [11]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ Output [2]: [d_date_sk#16, d_date#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), LessThanOrEqual(d_date,1998-09-03), 
IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), IsNotNull(d_date_sk)] ReadSchema: struct (15) ColumnarToRow [codegen id : 5] @@ -195,7 +195,7 @@ Input [2]: [d_date_sk#16, d_date#17] (16) Filter [codegen id : 5] Input [2]: [d_date_sk#16, d_date#17] -Condition : (((isnotnull(d_date#17) AND (d_date#17 >= 1998-08-04)) AND (d_date#17 <= 1998-09-03)) AND isnotnull(d_date_sk#16)) +Condition : (((isnotnull(d_date#17) AND (d_date#17 >= 1998-08-04)) AND (cast(d_date#17 as timestamp) <= 1998-09-03 00:00:00)) AND isnotnull(d_date_sk#16)) (17) Project [codegen id : 5] Output [1]: [d_date_sk#16] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt index b7c5f4081a1f3..edde6b36b2dbc 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt @@ -72,7 +72,7 @@ Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, Output [2]: [d_date_sk#12, d_date#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (11) ColumnarToRow [codegen id : 2] @@ -80,7 +80,7 @@ Input [2]: [d_date_sk#12, d_date#13] (12) Filter [codegen id : 2] Input [2]: [d_date_sk#12, d_date#13] -Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (d_date#13 <= 1999-03-24)) AND isnotnull(d_date_sk#12)) +Condition : (((isnotnull(d_date#13) AND (d_date#13 >= 1999-02-22)) AND (cast(d_date#13 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#12)) (13) Project [codegen id : 2] Output [1]: [d_date_sk#12] From 0d4556708b740aaa83e1e1596c22da7da7fb7b03 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 16:08:02 +0300 Subject: [PATCH 12/20] Re-gen results for PlanStabilityWithStatsSuite --- .../approved-plans-v1_4/q12.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q16.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q20.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q21.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q32.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q37.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q40.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q5.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q72.sf100/explain.txt | 2 +- .../approved-plans-v1_4/q77.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q80.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q82.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q92.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q94.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q95.sf100/explain.txt | 4 ++-- .../approved-plans-v1_4/q98.sf100/explain.txt | 4 ++-- .../approved-plans-v2_7/q12.sf100/explain.txt | 4 ++-- .../approved-plans-v2_7/q20.sf100/explain.txt | 4 ++-- .../approved-plans-v2_7/q5a.sf100/explain.txt | 4 ++-- .../approved-plans-v2_7/q72.sf100/explain.txt | 2 +- .../approved-plans-v2_7/q77a.sf100/explain.txt | 4 ++-- .../approved-plans-v2_7/q80a.sf100/explain.txt | 4 ++-- .../approved-plans-v2_7/q98.sf100/explain.txt | 4 ++-- 23 files changed, 44 insertions(+), 44 deletions(-) diff --git 
a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12.sf100/explain.txt index 7c414d9080734..9b21fba17d8e4 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12.sf100/explain.txt @@ -86,7 +86,7 @@ Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#6, Output [2]: [d_date_sk#13, d_date#14] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (14) ColumnarToRow [codegen id : 5] @@ -94,7 +94,7 @@ Input [2]: [d_date_sk#13, d_date#14] (15) Filter [codegen id : 5] Input [2]: [d_date_sk#13, d_date#14] -Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (d_date#14 <= 1999-03-24)) AND isnotnull(d_date_sk#13)) +Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (cast(d_date#14 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#13)) (16) Project [codegen id : 5] Output [1]: [d_date_sk#13] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16.sf100/explain.txt index 7604ea1e786d8..5e8740615d06a 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16.sf100/explain.txt @@ -195,7 +195,7 @@ Input [6]: [cs_ship_date_sk#1, cs_call_center_sk#3, cs_order_number#5, cs_ext_sh Output [2]: [d_date_sk#23, d_date#24] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2002-02-01), LessThanOrEqual(d_date,2002-04-02), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2002-02-01), IsNotNull(d_date_sk)] ReadSchema: struct (35) ColumnarToRow [codegen id : 10] @@ -203,7 +203,7 @@ Input [2]: [d_date_sk#23, d_date#24] (36) Filter [codegen id : 10] Input [2]: [d_date_sk#23, d_date#24] -Condition : (((isnotnull(d_date#24) AND (d_date#24 >= 2002-02-01)) AND (d_date#24 <= 2002-04-02)) AND isnotnull(d_date_sk#23)) +Condition : (((isnotnull(d_date#24) AND (d_date#24 >= 2002-02-01)) AND (cast(d_date#24 as timestamp) <= 2002-04-02 00:00:00)) AND isnotnull(d_date_sk#23)) (37) Project [codegen id : 10] Output [1]: [d_date_sk#23] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20.sf100/explain.txt index d6bbbfc515205..3cb2ac435cb32 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20.sf100/explain.txt @@ -86,7 +86,7 @@ Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_sk#6, Output [2]: [d_date_sk#13, d_date#14] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), 
IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (14) ColumnarToRow [codegen id : 5] @@ -94,7 +94,7 @@ Input [2]: [d_date_sk#13, d_date#14] (15) Filter [codegen id : 5] Input [2]: [d_date_sk#13, d_date#14] -Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (d_date#14 <= 1999-03-24)) AND isnotnull(d_date_sk#13)) +Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (cast(d_date#14 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#13)) (16) Project [codegen id : 5] Output [1]: [d_date_sk#13] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21.sf100/explain.txt index efb45b5ccdb7f..06842552caed7 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21.sf100/explain.txt @@ -78,7 +78,7 @@ Input [6]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_ Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-02-10), LessThanOrEqual(d_date,2000-04-10), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), IsNotNull(d_date_sk)] ReadSchema: struct (12) ColumnarToRow [codegen id : 2] @@ -86,7 +86,7 @@ Input [2]: [d_date_sk#10, d_date#11] (13) Filter [codegen id : 2] Input [2]: [d_date_sk#10, d_date#11] -Condition : (((isnotnull(d_date#11) AND (d_date#11 >= 2000-02-10)) AND (d_date#11 <= 2000-04-10)) AND isnotnull(d_date_sk#10)) +Condition : (((isnotnull(d_date#11) AND (cast(d_date#11 as timestamp) >= 2000-02-10 00:00:00)) AND (cast(d_date#11 as timestamp) <= 2000-04-10 00:00:00)) AND isnotnull(d_date_sk#10)) (14) BroadcastExchange Input [2]: [d_date_sk#10, d_date#11] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32.sf100/explain.txt index 5d171e5f595b9..cc0ddf60e406d 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32.sf100/explain.txt @@ -73,7 +73,7 @@ Condition : isnotnull(cs_item_sk#4) Output [2]: [d_date_sk#8, d_date#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-01-27), LessThanOrEqual(d_date,2000-04-26), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-01-27), IsNotNull(d_date_sk)] ReadSchema: struct (10) ColumnarToRow [codegen id : 2] @@ -81,7 +81,7 @@ Input [2]: [d_date_sk#8, d_date#9] (11) Filter [codegen id : 2] Input [2]: [d_date_sk#8, d_date#9] -Condition : (((isnotnull(d_date#9) AND (d_date#9 >= 2000-01-27)) AND (d_date#9 <= 2000-04-26)) AND isnotnull(d_date_sk#8)) +Condition : (((isnotnull(d_date#9) AND (d_date#9 >= 2000-01-27)) AND (cast(d_date#9 as timestamp) <= 2000-04-26 00:00:00)) AND isnotnull(d_date_sk#8)) (12) Project [codegen id : 2] Output [1]: [d_date_sk#8] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37.sf100/explain.txt 
index 1f2c169ba0921..edd4accfe1bf3 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37.sf100/explain.txt @@ -87,7 +87,7 @@ Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-02-01), LessThanOrEqual(d_date,2000-04-01), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-02-01), IsNotNull(d_date_sk)] ReadSchema: struct (13) ColumnarToRow [codegen id : 2] @@ -95,7 +95,7 @@ Input [2]: [d_date_sk#11, d_date#12] (14) Filter [codegen id : 2] Input [2]: [d_date_sk#11, d_date#12] -Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-02-01)) AND (d_date#12 <= 2000-04-01)) AND isnotnull(d_date_sk#11)) +Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-02-01)) AND (cast(d_date#12 as timestamp) <= 2000-04-01 00:00:00)) AND isnotnull(d_date_sk#11)) (15) Project [codegen id : 2] Output [1]: [d_date_sk#11] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40.sf100/explain.txt index c53874d2288e5..922bc407d930e 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40.sf100/explain.txt @@ -130,7 +130,7 @@ Input [7]: [cs_warehouse_sk#1, cs_item_sk#2, cs_sales_price#4, cs_sold_date_sk#5 Output [2]: [d_date_sk#17, d_date#18] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-02-10), LessThanOrEqual(d_date,2000-04-10), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), IsNotNull(d_date_sk)] ReadSchema: struct (22) ColumnarToRow [codegen id : 6] @@ -138,7 +138,7 @@ Input [2]: [d_date_sk#17, d_date#18] (23) Filter [codegen id : 6] Input [2]: [d_date_sk#17, d_date#18] -Condition : (((isnotnull(d_date#18) AND (d_date#18 >= 2000-02-10)) AND (d_date#18 <= 2000-04-10)) AND isnotnull(d_date_sk#17)) +Condition : (((isnotnull(d_date#18) AND (cast(d_date#18 as timestamp) >= 2000-02-10 00:00:00)) AND (cast(d_date#18 as timestamp) <= 2000-04-10 00:00:00)) AND isnotnull(d_date_sk#17)) (24) BroadcastExchange Input [2]: [d_date_sk#17, d_date#18] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5.sf100/explain.txt index ec4a566d0e4c4..663a576542d7b 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5.sf100/explain.txt @@ -153,7 +153,7 @@ Input [8]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_l Output [2]: [d_date_sk#25, d_date#26] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-23), LessThanOrEqual(d_date,2000-09-06), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-23), IsNotNull(d_date_sk)] ReadSchema: struct (17) ColumnarToRow [codegen id : 4] @@ -161,7 +161,7 @@ Input [2]: 
[d_date_sk#25, d_date#26] (18) Filter [codegen id : 4] Input [2]: [d_date_sk#25, d_date#26] -Condition : (((isnotnull(d_date#26) AND (d_date#26 >= 2000-08-23)) AND (d_date#26 <= 2000-09-06)) AND isnotnull(d_date_sk#25)) +Condition : (((isnotnull(d_date#26) AND (d_date#26 >= 2000-08-23)) AND (cast(d_date#26 as timestamp) <= 2000-09-06 00:00:00)) AND isnotnull(d_date_sk#25)) (19) Project [codegen id : 4] Output [1]: [d_date_sk#25] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72.sf100/explain.txt index 95092a8bbcc51..1b4bd981ab54e 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72.sf100/explain.txt @@ -276,7 +276,7 @@ Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint)) (44) BroadcastHashJoin [codegen id : 10] Left keys [1]: [cs_sold_date_sk#8] Right keys [1]: [d_date_sk#23] -Join condition: (d_date#17 > d_date#24 + 5 days) +Join condition: (cast(d_date#17 as timestamp) > cast(d_date#24 as timestamp) + 432000000000) (45) Project [codegen id : 10] Output [7]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, i_item_desc#21, d_week_seq#25, d_date_sk#28] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77.sf100/explain.txt index 34d2ec82f500d..a7398c210feaa 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77.sf100/explain.txt @@ -109,7 +109,7 @@ Condition : isnotnull(ss_store_sk#1) Output [2]: [d_date_sk#6, d_date#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-03), LessThanOrEqual(d_date,2000-09-02), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-03), IsNotNull(d_date_sk)] ReadSchema: struct (5) ColumnarToRow [codegen id : 1] @@ -117,7 +117,7 @@ Input [2]: [d_date_sk#6, d_date#7] (6) Filter [codegen id : 1] Input [2]: [d_date_sk#6, d_date#7] -Condition : (((isnotnull(d_date#7) AND (d_date#7 >= 2000-08-03)) AND (d_date#7 <= 2000-09-02)) AND isnotnull(d_date_sk#6)) +Condition : (((isnotnull(d_date#7) AND (d_date#7 >= 2000-08-03)) AND (cast(d_date#7 as timestamp) <= 2000-09-02 00:00:00)) AND isnotnull(d_date_sk#6)) (7) Project [codegen id : 1] Output [1]: [d_date_sk#6] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80.sf100/explain.txt index 2c625a8f4fd41..7867050d6c2a1 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80.sf100/explain.txt @@ -236,7 +236,7 @@ Input [8]: [ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-08-23), LessThanOrEqual(d_date,2000-09-22), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), 
GreaterThanOrEqual(d_date,2000-08-23), IsNotNull(d_date_sk)] ReadSchema: struct (29) ColumnarToRow [codegen id : 7] @@ -244,7 +244,7 @@ Input [2]: [d_date_sk#22, d_date#23] (30) Filter [codegen id : 7] Input [2]: [d_date_sk#22, d_date#23] -Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 2000-08-23)) AND (d_date#23 <= 2000-09-22)) AND isnotnull(d_date_sk#22)) +Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 2000-08-23)) AND (cast(d_date#23 as timestamp) <= 2000-09-22 00:00:00)) AND isnotnull(d_date_sk#22)) (31) Project [codegen id : 7] Output [1]: [d_date_sk#22] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82.sf100/explain.txt index 6725e273a3acc..8470bdab29592 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82.sf100/explain.txt @@ -87,7 +87,7 @@ Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-05-25), LessThanOrEqual(d_date,2000-07-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-05-25), IsNotNull(d_date_sk)] ReadSchema: struct (13) ColumnarToRow [codegen id : 2] @@ -95,7 +95,7 @@ Input [2]: [d_date_sk#11, d_date#12] (14) Filter [codegen id : 2] Input [2]: [d_date_sk#11, d_date#12] -Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-05-25)) AND (d_date#12 <= 2000-07-24)) AND isnotnull(d_date_sk#11)) +Condition : (((isnotnull(d_date#12) AND (d_date#12 >= 2000-05-25)) AND (cast(d_date#12 as timestamp) <= 2000-07-24 00:00:00)) AND isnotnull(d_date_sk#11)) (15) Project [codegen id : 2] Output [1]: [d_date_sk#11] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92.sf100/explain.txt index 94e76ced527a0..86ca198daaa42 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92.sf100/explain.txt @@ -76,7 +76,7 @@ Condition : isnotnull(ws_item_sk#4) Output [2]: [d_date_sk#8, d_date#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-01-27), LessThanOrEqual(d_date,2000-04-26), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,2000-01-27), IsNotNull(d_date_sk)] ReadSchema: struct (10) ColumnarToRow [codegen id : 2] @@ -84,7 +84,7 @@ Input [2]: [d_date_sk#8, d_date#9] (11) Filter [codegen id : 2] Input [2]: [d_date_sk#8, d_date#9] -Condition : (((isnotnull(d_date#9) AND (d_date#9 >= 2000-01-27)) AND (d_date#9 <= 2000-04-26)) AND isnotnull(d_date_sk#8)) +Condition : (((isnotnull(d_date#9) AND (d_date#9 >= 2000-01-27)) AND (cast(d_date#9 as timestamp) <= 2000-04-26 00:00:00)) AND isnotnull(d_date_sk#8)) (12) Project [codegen id : 2] Output [1]: [d_date_sk#8] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94.sf100/explain.txt index 840f300eadff4..b644ef253f6db 100644 --- 
a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94.sf100/explain.txt @@ -195,7 +195,7 @@ Input [6]: [ws_ship_date_sk#1, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_ Output [2]: [d_date_sk#23, d_date#24] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-01), LessThanOrEqual(d_date,1999-04-02), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-01), IsNotNull(d_date_sk)] ReadSchema: struct (35) ColumnarToRow [codegen id : 10] @@ -203,7 +203,7 @@ Input [2]: [d_date_sk#23, d_date#24] (36) Filter [codegen id : 10] Input [2]: [d_date_sk#23, d_date#24] -Condition : (((isnotnull(d_date#24) AND (d_date#24 >= 1999-02-01)) AND (d_date#24 <= 1999-04-02)) AND isnotnull(d_date_sk#23)) +Condition : (((isnotnull(d_date#24) AND (d_date#24 >= 1999-02-01)) AND (cast(d_date#24 as timestamp) <= 1999-04-02 00:00:00)) AND isnotnull(d_date_sk#23)) (37) Project [codegen id : 10] Output [1]: [d_date_sk#23] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95.sf100/explain.txt index 82e4651d09edf..2a4e97a63b952 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95.sf100/explain.txt @@ -257,7 +257,7 @@ Input [6]: [ws_ship_date_sk#1, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_ Output [2]: [d_date_sk#28, d_date#29] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-01), LessThanOrEqual(d_date,1999-04-02), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-01), IsNotNull(d_date_sk)] ReadSchema: struct (47) ColumnarToRow [codegen id : 18] @@ -265,7 +265,7 @@ Input [2]: [d_date_sk#28, d_date#29] (48) Filter [codegen id : 18] Input [2]: [d_date_sk#28, d_date#29] -Condition : (((isnotnull(d_date#29) AND (d_date#29 >= 1999-02-01)) AND (d_date#29 <= 1999-04-02)) AND isnotnull(d_date_sk#28)) +Condition : (((isnotnull(d_date#29) AND (d_date#29 >= 1999-02-01)) AND (cast(d_date#29 as timestamp) <= 1999-04-02 00:00:00)) AND isnotnull(d_date_sk#28)) (49) Project [codegen id : 18] Output [1]: [d_date_sk#28] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98.sf100/explain.txt index 91f246ce2aa12..016ba5e8d16aa 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98.sf100/explain.txt @@ -88,7 +88,7 @@ Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#6, Output [2]: [d_date_sk#13, d_date#14] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (14) ColumnarToRow [codegen id : 5] @@ -96,7 +96,7 @@ Input [2]: [d_date_sk#13, d_date#14] (15) 
Filter [codegen id : 5] Input [2]: [d_date_sk#13, d_date#14] -Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (d_date#14 <= 1999-03-24)) AND isnotnull(d_date_sk#13)) +Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (cast(d_date#14 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#13)) (16) Project [codegen id : 5] Output [1]: [d_date_sk#13] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12.sf100/explain.txt index d4acaa2cec459..db9f8eb40a3f0 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12.sf100/explain.txt @@ -86,7 +86,7 @@ Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#6, Output [2]: [d_date_sk#13, d_date#14] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (14) ColumnarToRow [codegen id : 5] @@ -94,7 +94,7 @@ Input [2]: [d_date_sk#13, d_date#14] (15) Filter [codegen id : 5] Input [2]: [d_date_sk#13, d_date#14] -Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (d_date#14 <= 1999-03-24)) AND isnotnull(d_date_sk#13)) +Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (cast(d_date#14 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#13)) (16) Project [codegen id : 5] Output [1]: [d_date_sk#13] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20.sf100/explain.txt index d9b14fcfb5bf9..a4f9f7ca2bd81 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20.sf100/explain.txt @@ -86,7 +86,7 @@ Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_sk#6, Output [2]: [d_date_sk#13, d_date#14] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (14) ColumnarToRow [codegen id : 5] @@ -94,7 +94,7 @@ Input [2]: [d_date_sk#13, d_date#14] (15) Filter [codegen id : 5] Input [2]: [d_date_sk#13, d_date#14] -Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (d_date#14 <= 1999-03-24)) AND isnotnull(d_date_sk#13)) +Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (cast(d_date#14 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#13)) (16) Project [codegen id : 5] Output [1]: [d_date_sk#13] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a.sf100/explain.txt index e660800796abe..dcc02b7d4ebca 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a.sf100/explain.txt +++ 
b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a.sf100/explain.txt @@ -166,7 +166,7 @@ Input [8]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_l Output [2]: [d_date_sk#25, d_date#26] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), LessThanOrEqual(d_date,1998-08-18), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), IsNotNull(d_date_sk)] ReadSchema: struct (17) ColumnarToRow [codegen id : 4] @@ -174,7 +174,7 @@ Input [2]: [d_date_sk#25, d_date#26] (18) Filter [codegen id : 4] Input [2]: [d_date_sk#25, d_date#26] -Condition : (((isnotnull(d_date#26) AND (d_date#26 >= 1998-08-04)) AND (d_date#26 <= 1998-08-18)) AND isnotnull(d_date_sk#25)) +Condition : (((isnotnull(d_date#26) AND (d_date#26 >= 1998-08-04)) AND (cast(d_date#26 as timestamp) <= 1998-08-18 00:00:00)) AND isnotnull(d_date_sk#25)) (19) Project [codegen id : 4] Output [1]: [d_date_sk#25] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72.sf100/explain.txt index 949f9c51b3d22..81344efe99d24 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72.sf100/explain.txt @@ -276,7 +276,7 @@ Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint)) (44) BroadcastHashJoin [codegen id : 10] Left keys [1]: [cs_sold_date_sk#8] Right keys [1]: [d_date_sk#23] -Join condition: (d_date#17 > d_date#24 + 5 days) +Join condition: (cast(d_date#17 as timestamp) > cast(d_date#24 as timestamp) + 432000000000) (45) Project [codegen id : 10] Output [7]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, i_item_desc#21, d_week_seq#25, d_date_sk#28] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a.sf100/explain.txt index 1f4f137f42c4a..d6ce671dcf6ee 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a.sf100/explain.txt @@ -122,7 +122,7 @@ Condition : isnotnull(ss_store_sk#1) Output [2]: [d_date_sk#6, d_date#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), LessThanOrEqual(d_date,1998-09-03), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), IsNotNull(d_date_sk)] ReadSchema: struct (5) ColumnarToRow [codegen id : 1] @@ -130,7 +130,7 @@ Input [2]: [d_date_sk#6, d_date#7] (6) Filter [codegen id : 1] Input [2]: [d_date_sk#6, d_date#7] -Condition : (((isnotnull(d_date#7) AND (d_date#7 >= 1998-08-04)) AND (d_date#7 <= 1998-09-03)) AND isnotnull(d_date_sk#6)) +Condition : (((isnotnull(d_date#7) AND (d_date#7 >= 1998-08-04)) AND (cast(d_date#7 as timestamp) <= 1998-09-03 00:00:00)) AND isnotnull(d_date_sk#6)) (7) Project [codegen id : 1] Output [1]: [d_date_sk#6] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a.sf100/explain.txt index 
cd8f5d5c6ae61..21e521ac3b7e2 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a.sf100/explain.txt @@ -249,7 +249,7 @@ Input [8]: [ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), LessThanOrEqual(d_date,1998-09-03), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1998-08-04), IsNotNull(d_date_sk)] ReadSchema: struct (29) ColumnarToRow [codegen id : 7] @@ -257,7 +257,7 @@ Input [2]: [d_date_sk#22, d_date#23] (30) Filter [codegen id : 7] Input [2]: [d_date_sk#22, d_date#23] -Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 1998-08-04)) AND (d_date#23 <= 1998-09-03)) AND isnotnull(d_date_sk#22)) +Condition : (((isnotnull(d_date#23) AND (d_date#23 >= 1998-08-04)) AND (cast(d_date#23 as timestamp) <= 1998-09-03 00:00:00)) AND isnotnull(d_date_sk#22)) (31) Project [codegen id : 7] Output [1]: [d_date_sk#22] diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98.sf100/explain.txt index 33d4b7d49e1d0..6f838bf072063 100644 --- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98.sf100/explain.txt +++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98.sf100/explain.txt @@ -87,7 +87,7 @@ Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#6, Output [2]: [d_date_sk#13, d_date#14] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] -PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), LessThanOrEqual(d_date,1999-03-24), IsNotNull(d_date_sk)] +PushedFilters: [IsNotNull(d_date), GreaterThanOrEqual(d_date,1999-02-22), IsNotNull(d_date_sk)] ReadSchema: struct (14) ColumnarToRow [codegen id : 5] @@ -95,7 +95,7 @@ Input [2]: [d_date_sk#13, d_date#14] (15) Filter [codegen id : 5] Input [2]: [d_date_sk#13, d_date#14] -Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (d_date#14 <= 1999-03-24)) AND isnotnull(d_date_sk#13)) +Condition : (((isnotnull(d_date#14) AND (d_date#14 >= 1999-02-22)) AND (cast(d_date#14 as timestamp) <= 1999-03-24 00:00:00)) AND isnotnull(d_date_sk#13)) (16) Project [codegen id : 5] Output [1]: [d_date_sk#13] From 4d360715e24e38d36e1629ce9f7a81cb0acb604e Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 15 Apr 2021 21:28:26 +0300 Subject: [PATCH 13/20] Fix StreamingJoinSuite --- .../org/apache/spark/sql/streaming/StreamingJoinSuite.scala | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala index 40131e822c5ce..69435619eb19e 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala @@ -31,6 +31,7 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession} import org.apache.spark.sql.execution.streaming.{MemoryStream, StatefulOperatorStateInfo, StreamingSymmetricHashJoinExec, StreamingSymmetricHashJoinHelper} import 
org.apache.spark.sql.execution.streaming.state.{StateStore, StateStoreProviderId} import org.apache.spark.sql.functions._ +import org.apache.spark.sql.internal.SQLConf import org.apache.spark.util.Utils abstract class StreamingJoinSuite @@ -47,6 +48,10 @@ abstract class StreamingJoinSuite StateStore.stop() } + override protected def sparkConf = super.sparkConf + // TODO(SPARK-35095): Use ANSI intervals in streaming join tests + .set(SQLConf.LEGACY_INTERVAL_ENABLED, true) + protected def setupStream(prefix: String, multiplier: Int): (MemoryStream[Int], DataFrame) = { val input = MemoryStream[Int] val df = input.toDF From 3024d3e42b8be55c5dfcae367ae18d09b3da191e Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Fri, 16 Apr 2021 00:07:22 +0300 Subject: [PATCH 14/20] Fix ExpressionParserSuite --- .../parser/ExpressionParserSuite.scala | 158 +++++++++--------- 1 file changed, 80 insertions(+), 78 deletions(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index 5c64e572bbd77..a025f5fcf817c 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -669,92 +669,94 @@ class ExpressionParserSuite extends AnalysisTest { } test("intervals") { - def checkIntervals(intervalValue: String, expected: Literal): Unit = { - Seq( - "" -> expected, - "-" -> UnaryMinus(expected) - ).foreach { case (sign, expectedLiteral) => - assertEqual(s"${sign}interval $intervalValue", expectedLiteral) + withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") { + def checkIntervals(intervalValue: String, expected: Literal): Unit = { + Seq( + "" -> expected, + "-" -> UnaryMinus(expected) + ).foreach { case (sign, expectedLiteral) => + assertEqual(s"${sign}interval $intervalValue", expectedLiteral) + } } - } - // Empty interval statement - intercept("interval", "at least one time unit should be given for interval literal") - - // Single Intervals. - val forms = Seq("", "s") - val values = Seq("0", "10", "-7", "21") - intervalUnits.foreach { unit => - forms.foreach { form => - values.foreach { value => - val expected = intervalLiteral(unit, value) - checkIntervals(s"$value $unit$form", expected) - checkIntervals(s"'$value' $unit$form", expected) - } + // Empty interval statement + intercept("interval", "at least one time unit should be given for interval literal") + + // Single Intervals. + val forms = Seq("", "s") + val values = Seq("0", "10", "-7", "21") + intervalUnits.foreach { unit => + forms.foreach { form => + values.foreach { value => + val expected = intervalLiteral(unit, value) + checkIntervals(s"$value $unit$form", expected) + checkIntervals(s"'$value' $unit$form", expected) + } + } } - } - // Hive nanosecond notation. - checkIntervals("13.123456789 seconds", intervalLiteral(SECOND, "13.123456789")) - checkIntervals( - "-13.123456789 second", - Literal(new CalendarInterval( - 0, - 0, - DateTimeTestUtils.secFrac(-13, -123, -456)))) - checkIntervals( - "13.123456 second", - Literal(new CalendarInterval( - 0, - 0, - DateTimeTestUtils.secFrac(13, 123, 456)))) - checkIntervals("1.001 second", - Literal(IntervalUtils.stringToInterval("1 second 1 millisecond"))) - - // Non Existing unit - intercept("interval 10 nanoseconds", "invalid unit 'nanoseconds'") - - // Year-Month intervals. 
- val yearMonthValues = Seq("123-10", "496-0", "-2-3", "-123-0", "\t -1-2\t") - yearMonthValues.foreach { value => - val result = Literal(IntervalUtils.fromYearMonthString(value)) - checkIntervals(s"'$value' year to month", result) - } + // Hive nanosecond notation. + checkIntervals("13.123456789 seconds", intervalLiteral(SECOND, "13.123456789")) + checkIntervals( + "-13.123456789 second", + Literal(new CalendarInterval( + 0, + 0, + DateTimeTestUtils.secFrac(-13, -123, -456)))) + checkIntervals( + "13.123456 second", + Literal(new CalendarInterval( + 0, + 0, + DateTimeTestUtils.secFrac(13, 123, 456)))) + checkIntervals("1.001 second", + Literal(IntervalUtils.stringToInterval("1 second 1 millisecond"))) + + // Non Existing unit + intercept("interval 10 nanoseconds", "invalid unit 'nanoseconds'") + + // Year-Month intervals. + val yearMonthValues = Seq("123-10", "496-0", "-2-3", "-123-0", "\t -1-2\t") + yearMonthValues.foreach { value => + val result = Literal(IntervalUtils.fromYearMonthString(value)) + checkIntervals(s"'$value' year to month", result) + } - // Day-Time intervals. - val datTimeValues = Seq( - "99 11:22:33.123456789", - "-99 11:22:33.123456789", - "10 9:8:7.123456789", - "1 0:0:0", - "-1 0:0:0", - "1 0:0:1", - "\t 1 0:0:1 ") - datTimeValues.foreach { value => - val result = Literal(IntervalUtils.fromDayTimeString(value)) - checkIntervals(s"'$value' day to second", result) - } + // Day-Time intervals. + val datTimeValues = Seq( + "99 11:22:33.123456789", + "-99 11:22:33.123456789", + "10 9:8:7.123456789", + "1 0:0:0", + "-1 0:0:0", + "1 0:0:1", + "\t 1 0:0:1 ") + datTimeValues.foreach { value => + val result = Literal(IntervalUtils.fromDayTimeString(value)) + checkIntervals(s"'$value' day to second", result) + } - // Hour-Time intervals. - val hourTimeValues = Seq( - "11:22:33.123456789", - "9:8:7.123456789", - "-19:18:17.123456789", - "0:0:0", - "0:0:1") - hourTimeValues.foreach { value => - val result = Literal(IntervalUtils.fromDayTimeString(value, HOUR, SECOND)) - checkIntervals(s"'$value' hour to second", result) - } + // Hour-Time intervals. + val hourTimeValues = Seq( + "11:22:33.123456789", + "9:8:7.123456789", + "-19:18:17.123456789", + "0:0:0", + "0:0:1") + hourTimeValues.foreach { value => + val result = Literal(IntervalUtils.fromDayTimeString(value, HOUR, SECOND)) + checkIntervals(s"'$value' hour to second", result) + } - // Unknown FROM TO intervals - intercept("interval '10' month to second", - "Intervals FROM month TO second are not supported.") + // Unknown FROM TO intervals + intercept("interval '10' month to second", + "Intervals FROM month TO second are not supported.") - // Composed intervals. - checkIntervals( - "3 months 4 days 22 seconds 1 millisecond", - Literal(new CalendarInterval(3, 4, 22001000L))) + // Composed intervals. 
+ checkIntervals( + "3 months 4 days 22 seconds 1 millisecond", + Literal(new CalendarInterval(3, 4, 22001000L))) + } } test("composed expressions") { From 94a5a98064c0f0fbb4df7fb49b8cc981690f8313 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Fri, 16 Apr 2021 00:18:09 +0300 Subject: [PATCH 15/20] Fix StreamingJoinHelperSuite --- .../analysis/StreamingJoinHelperSuite.scala | 207 +++++++++--------- 1 file changed, 108 insertions(+), 99 deletions(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelperSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelperSuite.scala index 7566545f98355..dd55ec34f189c 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelperSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelperSuite.scala @@ -21,120 +21,129 @@ import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, import org.apache.spark.sql.catalyst.optimizer.SimpleTestOptimizer import org.apache.spark.sql.catalyst.parser.CatalystSqlParser import org.apache.spark.sql.catalyst.plans.logical.{EventTimeWatermark, Filter, LeafNode} +import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types.{IntegerType, MetadataBuilder, TimestampType} class StreamingJoinHelperSuite extends AnalysisTest { test("extract watermark from time condition") { - val attributesToFindConstraintFor = Seq( - AttributeReference("leftTime", TimestampType)(), - AttributeReference("leftOther", IntegerType)()) - val metadataWithWatermark = new MetadataBuilder() - .putLong(EventTimeWatermark.delayKey, 1000) - .build() - val attributesWithWatermark = Seq( - AttributeReference("rightTime", TimestampType, metadata = metadataWithWatermark)(), - AttributeReference("rightOther", IntegerType)()) + // TODO(SPARK-35095): Use ANSI intervals in streaming join tests + withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") { + val attributesToFindConstraintFor = Seq( + AttributeReference("leftTime", TimestampType)(), + AttributeReference("leftOther", IntegerType)()) + val metadataWithWatermark = new MetadataBuilder() + .putLong(EventTimeWatermark.delayKey, 1000) + .build() + val attributesWithWatermark = Seq( + AttributeReference("rightTime", TimestampType, metadata = metadataWithWatermark)(), + AttributeReference("rightOther", IntegerType)()) - case class DummyLeafNode() extends LeafNode { - override def output: Seq[Attribute] = - attributesToFindConstraintFor ++ attributesWithWatermark - } + case class DummyLeafNode() extends LeafNode { + override def output: Seq[Attribute] = + attributesToFindConstraintFor ++ attributesWithWatermark + } - def watermarkFrom( - conditionStr: String, - rightWatermark: Option[Long] = Some(10000)): Option[Long] = { - val conditionExpr = Some(conditionStr).map { str => - val plan = - Filter( - CatalystSqlParser.parseExpression(str), - DummyLeafNode()) - val optimized = SimpleTestOptimizer.execute(SimpleAnalyzer.execute(plan)) - optimized.asInstanceOf[Filter].condition + def watermarkFrom( + conditionStr: String, + rightWatermark: Option[Long] = Some(10000)): Option[Long] = { + val conditionExpr = Some(conditionStr).map { str => + val plan = + Filter( + CatalystSqlParser.parseExpression(str), + DummyLeafNode()) + val optimized = SimpleTestOptimizer.execute(SimpleAnalyzer.execute(plan)) + optimized.asInstanceOf[Filter].condition + } + StreamingJoinHelper.getStateValueWatermark( + 
AttributeSet(attributesToFindConstraintFor), AttributeSet(attributesWithWatermark), + conditionExpr, rightWatermark) } - StreamingJoinHelper.getStateValueWatermark( - AttributeSet(attributesToFindConstraintFor), AttributeSet(attributesWithWatermark), - conditionExpr, rightWatermark) - } - // Test comparison directionality. E.g. if leftTime < rightTime and rightTime > watermark, - // then cannot define constraint on leftTime. - assert(watermarkFrom("leftTime > rightTime") === Some(10000)) - assert(watermarkFrom("leftTime >= rightTime") === Some(9999)) - assert(watermarkFrom("leftTime < rightTime") === None) - assert(watermarkFrom("leftTime <= rightTime") === None) - assert(watermarkFrom("rightTime > leftTime") === None) - assert(watermarkFrom("rightTime >= leftTime") === None) - assert(watermarkFrom("rightTime < leftTime") === Some(10000)) - assert(watermarkFrom("rightTime <= leftTime") === Some(9999)) + // Test comparison directionality. E.g. if leftTime < rightTime and rightTime > watermark, + // then cannot define constraint on leftTime. + assert(watermarkFrom("leftTime > rightTime") === Some(10000)) + assert(watermarkFrom("leftTime >= rightTime") === Some(9999)) + assert(watermarkFrom("leftTime < rightTime") === None) + assert(watermarkFrom("leftTime <= rightTime") === None) + assert(watermarkFrom("rightTime > leftTime") === None) + assert(watermarkFrom("rightTime >= leftTime") === None) + assert(watermarkFrom("rightTime < leftTime") === Some(10000)) + assert(watermarkFrom("rightTime <= leftTime") === Some(9999)) - // Test type conversions - assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG)") === Some(10000)) - assert(watermarkFrom("CAST(leftTime AS LONG) < CAST(rightTime AS LONG)") === None) - assert(watermarkFrom("CAST(leftTime AS DOUBLE) > CAST(rightTime AS DOUBLE)") === Some(10000)) - assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS DOUBLE)") === Some(10000)) - assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS FLOAT)") === Some(10000)) - assert(watermarkFrom("CAST(leftTime AS DOUBLE) > CAST(rightTime AS FLOAT)") === Some(10000)) - assert(watermarkFrom("CAST(leftTime AS STRING) > CAST(rightTime AS STRING)") === None) + // Test type conversions + assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG)") === Some(10000)) + assert(watermarkFrom("CAST(leftTime AS LONG) < CAST(rightTime AS LONG)") === None) + assert(watermarkFrom("CAST(leftTime AS DOUBLE) > CAST(rightTime AS DOUBLE)") === Some(10000)) + assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS DOUBLE)") === Some(10000)) + assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS FLOAT)") === Some(10000)) + assert(watermarkFrom("CAST(leftTime AS DOUBLE) > CAST(rightTime AS FLOAT)") === Some(10000)) + assert(watermarkFrom("CAST(leftTime AS STRING) > CAST(rightTime AS STRING)") === None) - // Test with timestamp type + calendar interval on either side of equation - // Note: timestamptype and calendar interval don't commute, so less valid combinations to test. 
- assert(watermarkFrom("leftTime > rightTime + interval 1 second") === Some(11000)) - assert(watermarkFrom("leftTime + interval 2 seconds > rightTime ") === Some(8000)) - assert(watermarkFrom("leftTime > rightTime - interval 3 second") === Some(7000)) - assert(watermarkFrom("rightTime < leftTime - interval 3 second") === Some(13000)) - assert(watermarkFrom("rightTime - interval 1 second < leftTime - interval 3 second") - === Some(12000)) + // Test with timestamp type + calendar interval on either side of equation + // Note: timestamptype and calendar interval don't commute, + // so less valid combinations to test. + assert(watermarkFrom("leftTime > rightTime + interval 1 second") === Some(11000)) + assert(watermarkFrom("leftTime + interval 2 seconds > rightTime ") === Some(8000)) + assert(watermarkFrom("leftTime > rightTime - interval 3 second") === Some(7000)) + assert(watermarkFrom("rightTime < leftTime - interval 3 second") === Some(13000)) + assert(watermarkFrom("rightTime - interval 1 second < leftTime - interval 3 second") + === Some(12000)) - // Test with casted long type + constants on either side of equation - // Note: long type and constants commute, so more combinations to test. - // -- Constants on the right - assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG) + 1") === Some(11000)) - assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG) - 1") === Some(9000)) - assert(watermarkFrom("CAST(leftTime AS LONG) > CAST((rightTime + interval 1 second) AS LONG)") - === Some(11000)) - assert(watermarkFrom("CAST(leftTime AS LONG) > 2 + CAST(rightTime AS LONG)") === Some(12000)) - assert(watermarkFrom("CAST(leftTime AS LONG) > -0.5 + CAST(rightTime AS LONG)") === Some(9500)) - assert(watermarkFrom("CAST(leftTime AS LONG) - CAST(rightTime AS LONG) > 2") === Some(12000)) - assert(watermarkFrom("-CAST(rightTime AS DOUBLE) + CAST(leftTime AS LONG) > 0.1") - === Some(10100)) - assert(watermarkFrom("0 > CAST(rightTime AS LONG) - CAST(leftTime AS LONG) + 0.2") - === Some(10200)) - // -- Constants on the left - assert(watermarkFrom("CAST(leftTime AS LONG) + 2 > CAST(rightTime AS LONG)") === Some(8000)) - assert(watermarkFrom("1 + CAST(leftTime AS LONG) > CAST(rightTime AS LONG)") === Some(9000)) - assert(watermarkFrom("CAST((leftTime + interval 3 second) AS LONG) > CAST(rightTime AS LONG)") - === Some(7000)) - assert(watermarkFrom("CAST(leftTime AS LONG) - 2 > CAST(rightTime AS LONG)") === Some(12000)) - assert(watermarkFrom("CAST(leftTime AS LONG) + 0.5 > CAST(rightTime AS LONG)") === Some(9500)) - assert(watermarkFrom("CAST(leftTime AS LONG) - CAST(rightTime AS LONG) - 2 > 0") - === Some(12000)) - assert(watermarkFrom("-CAST(rightTime AS LONG) + CAST(leftTime AS LONG) - 0.1 > 0") - === Some(10100)) - // -- Constants on both sides, mixed types - assert(watermarkFrom("CAST(leftTime AS LONG) - 2.0 > CAST(rightTime AS LONG) + 1") - === Some(13000)) + // Test with casted long type + constants on either side of equation + // Note: long type and constants commute, so more combinations to test. 
+ // -- Constants on the right + assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG) + 1") === Some(11000)) + assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG) - 1") === Some(9000)) + assert(watermarkFrom("CAST(leftTime AS LONG) > CAST((rightTime + interval 1 second) AS LONG)") + === Some(11000)) + assert(watermarkFrom("CAST(leftTime AS LONG) > 2 + CAST(rightTime AS LONG)") === Some(12000)) + assert(watermarkFrom("CAST(leftTime AS LONG) > -0.5 + CAST(rightTime AS LONG)") === + Some(9500)) + assert(watermarkFrom("CAST(leftTime AS LONG) - CAST(rightTime AS LONG) > 2") === Some(12000)) + assert(watermarkFrom("-CAST(rightTime AS DOUBLE) + CAST(leftTime AS LONG) > 0.1") + === Some(10100)) + assert(watermarkFrom("0 > CAST(rightTime AS LONG) - CAST(leftTime AS LONG) + 0.2") + === Some(10200)) + // -- Constants on the left + assert(watermarkFrom("CAST(leftTime AS LONG) + 2 > CAST(rightTime AS LONG)") === Some(8000)) + assert(watermarkFrom("1 + CAST(leftTime AS LONG) > CAST(rightTime AS LONG)") === Some(9000)) + assert( + watermarkFrom("CAST((leftTime + interval 3 second) AS LONG) > CAST(rightTime AS LONG)") + === Some(7000)) + assert(watermarkFrom("CAST(leftTime AS LONG) - 2 > CAST(rightTime AS LONG)") === Some(12000)) + assert(watermarkFrom("CAST(leftTime AS LONG) + 0.5 > CAST(rightTime AS LONG)") === Some(9500)) + assert(watermarkFrom("CAST(leftTime AS LONG) - CAST(rightTime AS LONG) - 2 > 0") + === Some(12000)) + assert(watermarkFrom("-CAST(rightTime AS LONG) + CAST(leftTime AS LONG) - 0.1 > 0") + === Some(10100)) + // -- Constants on both sides, mixed types + assert(watermarkFrom("CAST(leftTime AS LONG) - 2.0 > CAST(rightTime AS LONG) + 1") + === Some(13000)) - // Test multiple conditions, should return minimum watermark - assert(watermarkFrom( - "leftTime > rightTime - interval 3 second AND rightTime < leftTime + interval 2 seconds") === - Some(7000)) // first condition wins - assert(watermarkFrom( - "leftTime > rightTime - interval 3 second AND rightTime < leftTime + interval 4 seconds") === - Some(6000)) // second condition wins + // Test multiple conditions, should return minimum watermark + assert(watermarkFrom( + "leftTime > rightTime - interval 3 second AND rightTime < leftTime + interval 2 seconds") + === Some(7000)) // first condition wins + assert(watermarkFrom( + "leftTime > rightTime - interval 3 second AND rightTime < leftTime + interval 4 seconds") + === Some(6000)) // second condition wins - // Test invalid comparisons - assert(watermarkFrom("cast(leftTime AS LONG) > leftOther") === None) // non-time attributes - assert(watermarkFrom("leftOther > rightOther") === None) // non-time attributes - assert(watermarkFrom("leftOther > rightOther AND leftTime > rightTime") === Some(10000)) - assert(watermarkFrom("cast(rightTime AS DOUBLE) < rightOther") === None) // non-time attributes - assert(watermarkFrom("leftTime > rightTime + interval 1 month") === None) // month not allowed + // Test invalid comparisons + assert(watermarkFrom("cast(leftTime AS LONG) > leftOther") === None) // non-time attributes + assert(watermarkFrom("leftOther > rightOther") === None) // non-time attributes + assert(watermarkFrom("leftOther > rightOther AND leftTime > rightTime") === Some(10000)) + // non-time attributes + assert(watermarkFrom("cast(rightTime AS DOUBLE) < rightOther") === None) + assert(watermarkFrom("leftTime > rightTime + interval 1 month") === None) // month not allowed - // Test static comparisons - assert(watermarkFrom("cast(leftTime AS LONG) > 10") 
=== Some(10000)) + // Test static comparisons + assert(watermarkFrom("cast(leftTime AS LONG) > 10") === Some(10000)) - // Test non-positive results - assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG) - 10") === Some(0)) - assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG) - 100") === Some(-90000)) + // Test non-positive results + assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG) - 10") === Some(0)) + assert(watermarkFrom("CAST(leftTime AS LONG) > CAST(rightTime AS LONG) - 100") === + Some(-90000)) + } } } From 2cdf813edbdef7cd0f71786e58c1370855a6f1c0 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Fri, 16 Apr 2021 09:09:20 +0300 Subject: [PATCH 16/20] SQLConf.get -> conf --- .../scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 2f16c2d4e2d6c..021ee78409991 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -2314,7 +2314,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg */ override def visitInterval(ctx: IntervalContext): Literal = withOrigin(ctx) { val parsedInterval = parseIntervalLiteral(ctx) - if (SQLConf.get.legacyIntervalEnabled) { + if (conf.legacyIntervalEnabled) { Literal(parsedInterval, CalendarIntervalType) } else if (parsedInterval.months != 0) { if (parsedInterval.days != 0 || parsedInterval.microseconds != 0) { From cd578f0bb2ca8f46979f5a4e6179d0e8fd28a0b0 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Fri, 16 Apr 2021 09:27:18 +0300 Subject: [PATCH 17/20] Fix ExpressionParserSuite --- .../sql/catalyst/parser/AstBuilder.scala | 31 +++++++++++-------- .../spark/sql/errors/QueryParsingErrors.scala | 2 +- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 021ee78409991..fc018cf8adcb6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -2099,7 +2099,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg ex.setStackTrace(e.getStackTrace) throw ex } - Literal(interval, CalendarIntervalType) + calendarIntervalToLiteral(interval, ctx) case "X" => val padding = if (value.length % 2 != 0) "0" else "" Literal(DatatypeConverter.parseHexBinary(padding + value)) @@ -2306,6 +2306,22 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg UnresolvedTableOrView(visitMultipartIdentifier(ctx), commandName, allowTempView) } + private def calendarIntervalToLiteral( + calendarInterval: CalendarInterval, + ctx: ParserRuleContext): Literal = { + if (conf.legacyIntervalEnabled) { + Literal(calendarInterval, CalendarIntervalType) + } else if (calendarInterval.months != 0) { + if (calendarInterval.days != 0 || calendarInterval.microseconds != 0) { + throw QueryParsingErrors.mixedIntervalError(ctx) + } + Literal(calendarInterval.months, YearMonthIntervalType) + } else { + val micros = IntervalUtils.getDuration(calendarInterval, TimeUnit.MICROSECONDS) + 
Literal(micros, DayTimeIntervalType) + } + } + /** * Create a [[CalendarInterval]] or ANSI interval literal expression. * Two syntaxes are supported: @@ -2313,18 +2329,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg * - from-to unit, for instance: interval '1-2' year to month. */ override def visitInterval(ctx: IntervalContext): Literal = withOrigin(ctx) { - val parsedInterval = parseIntervalLiteral(ctx) - if (conf.legacyIntervalEnabled) { - Literal(parsedInterval, CalendarIntervalType) - } else if (parsedInterval.months != 0) { - if (parsedInterval.days != 0 || parsedInterval.microseconds != 0) { - throw QueryParsingErrors.mixedIntervalError(ctx) - } - Literal(parsedInterval.months, YearMonthIntervalType) - } else { - val micros = IntervalUtils.getDuration(parsedInterval, TimeUnit.MICROSECONDS) - Literal(micros, DayTimeIntervalType) - } + calendarIntervalToLiteral(parseIntervalLiteral(ctx), ctx) } /** diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala index d5e619e8b11e9..93b392f7ab02b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala @@ -368,7 +368,7 @@ object QueryParsingErrors { new ParseException("LOCAL is supported only with file: scheme", ctx) } - def mixedIntervalError(ctx: IntervalContext): Throwable = { + def mixedIntervalError(ctx: ParserRuleContext): Throwable = { new ParseException( "Mixing of year-month and day-time fields is not allowed. " + s"Set '${SQLConf.LEGACY_INTERVAL_ENABLED.key}' to true to enable the legacy interval type " + From 2e67e6edf413cbc6264ed1c2da146757bb8589ef Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Fri, 16 Apr 2021 09:52:06 +0300 Subject: [PATCH 18/20] Test CalendarInterval in ExpressionParserSuite --- .../parser/ExpressionParserSuite.scala | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index a025f5fcf817c..db608996d5fd4 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -464,18 +464,20 @@ class ExpressionParserSuite extends AnalysisTest { intercept("timestamP '2016-33-11 20:54:00.000'", "Cannot parse the TIMESTAMP value") // Interval. 
- val intervalLiteral = Literal(IntervalUtils.stringToInterval("interval 3 month 1 hour")) - assertEqual("InterVal 'interval 3 month 1 hour'", intervalLiteral) - assertEqual("INTERVAL '3 month 1 hour'", intervalLiteral) - intercept("Interval 'interval 3 monthsss 1 hoursss'", "Cannot parse the INTERVAL value") - assertEqual( - "-interval '3 month 1 hour'", - UnaryMinus(Literal(IntervalUtils.stringToInterval("interval 3 month 1 hour")))) - val intervalStrWithAllUnits = "1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds " + - "100 millisecond 200 microseconds" - assertEqual( - s"interval '$intervalStrWithAllUnits'", - Literal(IntervalUtils.stringToInterval(intervalStrWithAllUnits))) + withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") { + val intervalLiteral = Literal(IntervalUtils.stringToInterval("interval 3 month 1 hour")) + assertEqual("InterVal 'interval 3 month 1 hour'", intervalLiteral) + assertEqual("INTERVAL '3 month 1 hour'", intervalLiteral) + intercept("Interval 'interval 3 monthsss 1 hoursss'", "Cannot parse the INTERVAL value") + assertEqual( + "-interval '3 month 1 hour'", + UnaryMinus(Literal(IntervalUtils.stringToInterval("interval 3 month 1 hour")))) + val intervalStrWithAllUnits = "1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds " + + "100 millisecond 200 microseconds" + assertEqual( + s"interval '$intervalStrWithAllUnits'", + Literal(IntervalUtils.stringToInterval(intervalStrWithAllUnits))) + } // Binary. assertEqual("X'A'", Literal(Array(0x0a).map(_.toByte))) From de065155c2ba023655b94f2ed9f9cf7749ac856f Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Fri, 16 Apr 2021 11:12:03 +0300 Subject: [PATCH 19/20] Re-gen expected test results --- .../sql-tests/results/ansi/datetime.sql.out | 14 +- .../sql-tests/results/ansi/interval.sql.out | 134 +++++++++------ .../sql-tests/results/datetime-legacy.sql.out | 16 +- .../sql-tests/results/datetime.sql.out | 16 +- .../sql-tests/results/interval.sql.out | 158 +++++++++++------- .../results/postgreSQL/interval.sql.out | 28 ++-- .../native/dateTimeOperations.sql.out | 78 ++++----- .../sql-tests/results/udf/udf-window.sql.out | 2 +- .../sql-tests/results/window.sql.out | 2 +- .../approved-plans-v1_4/q72.sf100/explain.txt | 2 +- .../approved-plans-v1_4/q72/explain.txt | 2 +- .../approved-plans-v2_7/q72.sf100/explain.txt | 2 +- .../approved-plans-v2_7/q72/explain.txt | 2 +- 13 files changed, 261 insertions(+), 195 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out index b82ca536ff742..a550f56436a2c 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out @@ -291,7 +291,7 @@ select timestamp '2019-01-01中文' -- !query select timestamp'2011-11-11 11:11:11' + interval '2' day -- !query schema -struct +struct -- !query output 2011-11-13 11:11:11 @@ -299,7 +299,7 @@ struct -- !query select timestamp'2011-11-11 11:11:11' - interval '2' day -- !query schema -struct +struct -- !query output 2011-11-09 11:11:11 @@ -307,7 +307,7 @@ struct -- !query select date'2011-11-11 11:11:11' + interval '2' second -- !query schema -struct +struct -- !query output 2011-11-11 00:00:02 @@ -315,7 +315,7 @@ struct -- !query select date'2011-11-11 11:11:11' - interval '2' second -- !query schema -struct +struct -- !query output 2011-11-10 23:59:58 @@ -323,7 +323,7 @@ struct -- !query select '2011-11-11' - interval '2' day -- !query schema 
-struct<2011-11-11 - 172800000000:string> +struct<2011-11-11 - INTERVAL '2 00:00:00' DAY TO SECOND:string> -- !query output 2011-11-09 00:00:00 @@ -331,7 +331,7 @@ struct<2011-11-11 - 172800000000:string> -- !query select '2011-11-11 11:11:11' - interval '2' second -- !query schema -struct<2011-11-11 11:11:11 - 2000000:string> +struct<2011-11-11 11:11:11 - INTERVAL '0 00:00:02' DAY TO SECOND:string> -- !query output 2011-11-11 11:11:09 @@ -351,7 +351,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + (- 2000000)' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 +cannot resolve '1 + (- INTERVAL '0 00:00:02' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index 60736474a94af..3c0f8f9697351 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -74,13 +74,13 @@ select interval '2 seconds' / 0 struct<> -- !query output java.lang.ArithmeticException -divide by zero +/ by zero -- !query select interval '2 seconds' / null -- !query schema -struct +struct -- !query output NULL @@ -88,7 +88,7 @@ NULL -- !query select interval '2 seconds' * null -- !query schema -struct +struct -- !query output NULL @@ -96,7 +96,7 @@ NULL -- !query select null * interval '2 seconds' -- !query schema -struct +struct -- !query output NULL @@ -104,9 +104,15 @@ NULL -- !query select -interval '-1 month 1 day -1 second' -- !query schema -struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval> +struct<> -- !query output -1 months -1 days 1 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 8) + +== SQL == +select -interval '-1 month 1 day -1 second' +--------^^^ -- !query @@ -126,9 +132,15 @@ select -interval -1 month 1 day -1 second -- !query select +interval '-1 month 1 day -1 second' -- !query schema -struct<(+ INTERVAL '-1 months 1 days -1 seconds'):interval> +struct<> -- !query output --1 months 1 days -1 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. 
Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 8) + +== SQL == +select +interval '-1 month 1 day -1 second' +--------^^^ -- !query @@ -148,7 +160,7 @@ select +interval -1 month 1 day -1 second -- !query select interval -'1-1' year to month -- !query schema -struct<-13:year-month interval> +struct -- !query output -1-1 @@ -156,7 +168,7 @@ struct<-13:year-month interval> -- !query select interval -'-1-1' year to month -- !query schema -struct<13:year-month interval> +struct -- !query output 1-1 @@ -164,7 +176,7 @@ struct<13:year-month interval> -- !query select interval +'-1-1' year to month -- !query schema -struct<-13:year-month interval> +struct -- !query output -1-1 @@ -172,7 +184,7 @@ struct<-13:year-month interval> -- !query select interval - '1 2:3:4.001' day to second -- !query schema -struct<-93784001000:day-time interval> +struct -- !query output -1 02:03:04.001000000 @@ -180,7 +192,7 @@ struct<-93784001000:day-time interval> -- !query select interval +'1 2:3:4.001' day to second -- !query schema -struct<93784001000:day-time interval> +struct -- !query output 1 02:03:04.001000000 @@ -188,7 +200,7 @@ struct<93784001000:day-time interval> -- !query select interval -'-1 2:3:4.001' day to second -- !query schema -struct<93784001000:day-time interval> +struct -- !query output 1 02:03:04.001000000 @@ -325,7 +337,7 @@ NULL -- !query select interval 13.123456789 seconds, interval -13.123456789 second -- !query schema -struct<13123456:day-time interval,-13123456:day-time interval> +struct -- !query output 0 00:00:13.123456000 -0 00:00:13.123456000 @@ -361,7 +373,7 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998 -- !query select interval '0 0:0:0.1' day to second -- !query schema -struct<100000:day-time interval> +struct -- !query output 0 00:00:00.100000000 @@ -369,7 +381,7 @@ struct<100000:day-time interval> -- !query select interval '10-9' year to month -- !query schema -struct<129:year-month interval> +struct -- !query output 10-9 @@ -377,7 +389,7 @@ struct<129:year-month interval> -- !query select interval '20 15' day to hour -- !query schema -struct<1782000000000:day-time interval> +struct -- !query output 20 15:00:00.000000000 @@ -385,7 +397,7 @@ struct<1782000000000:day-time interval> -- !query select interval '20 15:40' day to minute -- !query schema -struct<1784400000000:day-time interval> +struct -- !query output 20 15:40:00.000000000 @@ -393,7 +405,7 @@ struct<1784400000000:day-time interval> -- !query select interval '20 15:40:32.99899999' day to second -- !query schema -struct<1784432998999:day-time interval> +struct -- !query output 20 15:40:32.998999000 @@ -401,7 +413,7 @@ struct<1784432998999:day-time interval> -- !query select interval '15:40' hour to minute -- !query schema -struct<56400000000:day-time interval> +struct -- !query output 0 15:40:00.000000000 @@ -409,7 +421,7 @@ struct<56400000000:day-time interval> -- !query select interval '15:40:32.99899999' hour to second -- !query schema -struct<56432998999:day-time interval> +struct -- !query output 0 15:40:32.998999000 @@ -417,7 +429,7 @@ struct<56432998999:day-time interval> -- !query select interval '40:32.99899999' minute to second -- !query schema -struct<2432998999:day-time interval> +struct -- !query output 0 00:40:32.998999000 @@ -425,7 +437,7 @@ struct<2432998999:day-time interval> -- !query select interval '40:32' minute to second -- !query schema -struct<2432000000:day-time interval> +struct -- 
!query output 0 00:40:32.000000000 @@ -547,7 +559,7 @@ select interval 10 nanoseconds -- !query select map(1, interval 1 day, 2, interval 3 week) -- !query schema -struct> +struct> -- !query output {1:1 00:00:00.000000000,2:21 00:00:00.000000000} @@ -555,25 +567,43 @@ struct> -- !query select interval 'interval 3 year 1 hour' -- !query schema -struct +struct<> -- !query output -3 years 1 hours +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval 'interval 3 year 1 hour' +-------^^^ -- !query select interval '3 year 1 hour' -- !query schema -struct +struct<> -- !query output -3 years 1 hours +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval '3 year 1 hour' +-------^^^ -- !query SELECT interval '1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds' -- !query schema -struct +struct<> -- !query output -1 years 3 months 16 days 1 hours 3 minutes 2.1002 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +SELECT interval '1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds' +-------^^^ -- !query @@ -816,7 +846,7 @@ select interval '2-2' year to month + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 @@ -832,7 +862,7 @@ select interval '2-2' year to month + tsval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 @@ -843,7 +873,7 @@ select interval '2-2' year to month - interval '3-3' year to month from interval_arithmetic -- !query schema -struct<(26 + 39):year-month interval,(26 - 39):year-month interval> +struct<(INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3-3' YEAR TO MONTH):year-month interval,(INTERVAL '2-2' YEAR TO MONTH - INTERVAL '3-3' YEAR TO MONTH):year-month interval> -- !query output 5-5 -1-1 @@ -859,7 +889,7 @@ select interval '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -875,7 +905,7 @@ select interval '99 11:22:33.123456789' day to second + tsval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 00:00:00 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -891,7 +921,7 @@ select interval '99 11:22:33.123456789' day to second + strval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 
12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -902,7 +932,7 @@ select interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second from interval_arithmetic -- !query schema -struct<(8594553123456 + 896887123456):day-time interval,(8594553123456 - 896887123456):day-time interval> +struct<(INTERVAL '99 11:22:33.123456' DAY TO SECOND + INTERVAL '10 09:08:07.123456' DAY TO SECOND):day-time interval,(INTERVAL '99 11:22:33.123456' DAY TO SECOND - INTERVAL '10 09:08:07.123456' DAY TO SECOND):day-time interval> -- !query output 109 20:30:40.246912000 89 02:14:26.000000000 @@ -910,31 +940,31 @@ struct<(8594553123456 + 896887123456):day-time interval,(8594553123456 - 8968871 -- !query select interval '\t interval 1 day' -- !query schema -struct +struct -- !query output -1 days +1 00:00:00.000000000 -- !query select interval 'interval \t 1\tday' -- !query schema -struct +struct -- !query output -1 days +1 00:00:00.000000000 -- !query select interval 'interval\t1\tday' -- !query schema -struct +struct -- !query output -1 days +1 00:00:00.000000000 -- !query select interval '1\t' day -- !query schema -struct<86400000000:day-time interval> +struct -- !query output 1 00:00:00.000000000 @@ -942,7 +972,7 @@ struct<86400000000:day-time interval> -- !query select interval '1 ' day -- !query schema -struct<86400000000:day-time interval> +struct -- !query output 1 00:00:00.000000000 @@ -950,7 +980,7 @@ struct<86400000000:day-time interval> -- !query select interval '2-2\t' year to month -- !query schema -struct<26:year-month interval> +struct -- !query output 2-2 @@ -972,7 +1002,7 @@ select interval '-\t2-2\t' year to month -- !query select interval '\n0 12:34:46.789\t' day to second -- !query schema -struct<45286789000:day-time interval> +struct -- !query output 0 12:34:46.789000000 @@ -1067,7 +1097,7 @@ select a * 1.1 from values (interval '-2147483648 months', interval '2147483647 struct<> -- !query output java.lang.ArithmeticException -integer overflow +Overflow -- !query @@ -1076,7 +1106,7 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647 struct<> -- !query output java.lang.ArithmeticException -integer overflow +Overflow -- !query @@ -1086,7 +1116,7 @@ SELECT to_csv(named_struct('a', interval 32 month, 'b', interval 70 minute)), from_csv(to_csv(named_struct('a', interval 32 month, 'b', interval 70 minute)), 'a interval, b interval') -- !query schema -struct,to_csv(from_csv(1, 1 day)):string,to_csv(named_struct(a, 32, b, 4200000000)):string,from_csv(to_csv(named_struct(a, 32, b, 4200000000))):struct> +struct,to_csv(from_csv(1, 1 day)):string,to_csv(named_struct(a, INTERVAL '2-8' YEAR TO MONTH, b, INTERVAL '0 01:10:00' DAY TO SECOND)):string,from_csv(to_csv(named_struct(a, INTERVAL '2-8' YEAR TO MONTH, b, INTERVAL '0 01:10:00' DAY TO SECOND))):struct> -- !query output {"a":1,"b":1 days} 1,1 days 32,4200000000 {"a":null,"b":null} @@ -1242,6 +1272,6 @@ select interval 'interval 1' day -- !query select interval '-\t 1' day -- !query schema -struct<-86400000000:day-time interval> +struct -- !query output -1 00:00:00.000000000 diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index 2e9540521e485..c7b1b0447e53d 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -271,7 +271,7 @@ select timestamp '2019-01-01中文' 
-- !query select timestamp'2011-11-11 11:11:11' + interval '2' day -- !query schema -struct +struct -- !query output 2011-11-13 11:11:11 @@ -279,7 +279,7 @@ struct -- !query select timestamp'2011-11-11 11:11:11' - interval '2' day -- !query schema -struct +struct -- !query output 2011-11-09 11:11:11 @@ -287,7 +287,7 @@ struct -- !query select date'2011-11-11 11:11:11' + interval '2' second -- !query schema -struct +struct -- !query output 2011-11-11 00:00:02 @@ -295,7 +295,7 @@ struct -- !query select date'2011-11-11 11:11:11' - interval '2' second -- !query schema -struct +struct -- !query output 2011-11-10 23:59:58 @@ -303,7 +303,7 @@ struct -- !query select '2011-11-11' - interval '2' day -- !query schema -struct<2011-11-11 - 172800000000:string> +struct<2011-11-11 - INTERVAL '2 00:00:00' DAY TO SECOND:string> -- !query output 2011-11-09 00:00:00 @@ -311,7 +311,7 @@ struct<2011-11-11 - 172800000000:string> -- !query select '2011-11-11 11:11:11' - interval '2' second -- !query schema -struct<2011-11-11 11:11:11 - 2000000:string> +struct<2011-11-11 11:11:11 - INTERVAL '0 00:00:02' DAY TO SECOND:string> -- !query output 2011-11-11 11:11:09 @@ -319,7 +319,7 @@ struct<2011-11-11 11:11:11 - 2000000:string> -- !query select '1' - interval '2' second -- !query schema -struct<1 - 2000000:string> +struct<1 - INTERVAL '0 00:00:02' DAY TO SECOND:string> -- !query output NULL @@ -330,7 +330,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + (- 2000000)' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 +cannot resolve '1 + (- INTERVAL '0 00:00:02' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out index 003c1706874fb..9059c86a3b563 100755 --- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out @@ -271,7 +271,7 @@ select timestamp '2019-01-01中文' -- !query select timestamp'2011-11-11 11:11:11' + interval '2' day -- !query schema -struct +struct -- !query output 2011-11-13 11:11:11 @@ -279,7 +279,7 @@ struct -- !query select timestamp'2011-11-11 11:11:11' - interval '2' day -- !query schema -struct +struct -- !query output 2011-11-09 11:11:11 @@ -287,7 +287,7 @@ struct -- !query select date'2011-11-11 11:11:11' + interval '2' second -- !query schema -struct +struct -- !query output 2011-11-11 00:00:02 @@ -295,7 +295,7 @@ struct -- !query select date'2011-11-11 11:11:11' - interval '2' second -- !query schema -struct +struct -- !query output 2011-11-10 23:59:58 @@ -303,7 +303,7 @@ struct -- !query select '2011-11-11' - interval '2' day -- !query schema -struct<2011-11-11 - 172800000000:string> +struct<2011-11-11 - INTERVAL '2 00:00:00' DAY TO SECOND:string> -- !query output 2011-11-09 00:00:00 @@ -311,7 +311,7 @@ struct<2011-11-11 - 172800000000:string> -- !query select '2011-11-11 11:11:11' - interval '2' second -- !query schema -struct<2011-11-11 11:11:11 - 2000000:string> +struct<2011-11-11 11:11:11 - INTERVAL '0 00:00:02' DAY TO SECOND:string> -- !query output 2011-11-11 11:11:09 @@ -319,7 +319,7 @@ struct<2011-11-11 11:11:11 - 2000000:string> -- !query select '1' - interval '2' second -- !query schema -struct<1 - 2000000:string> +struct<1 - INTERVAL '0 00:00:02' DAY TO 
SECOND:string> -- !query output NULL @@ -330,7 +330,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + (- 2000000)' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 +cannot resolve '1 + (- INTERVAL '0 00:00:02' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index f029803659c6a..81b97e0b2b8e5 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -71,15 +71,16 @@ long overflow -- !query select interval '2 seconds' / 0 -- !query schema -struct +struct<> -- !query output -NULL +java.lang.ArithmeticException +/ by zero -- !query select interval '2 seconds' / null -- !query schema -struct +struct -- !query output NULL @@ -87,7 +88,7 @@ NULL -- !query select interval '2 seconds' * null -- !query schema -struct +struct -- !query output NULL @@ -95,7 +96,7 @@ NULL -- !query select null * interval '2 seconds' -- !query schema -struct +struct -- !query output NULL @@ -103,9 +104,15 @@ NULL -- !query select -interval '-1 month 1 day -1 second' -- !query schema -struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval> +struct<> -- !query output -1 months -1 days 1 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 8) + +== SQL == +select -interval '-1 month 1 day -1 second' +--------^^^ -- !query @@ -125,9 +132,15 @@ select -interval -1 month 1 day -1 second -- !query select +interval '-1 month 1 day -1 second' -- !query schema -struct<(+ INTERVAL '-1 months 1 days -1 seconds'):interval> +struct<> -- !query output --1 months 1 days -1 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. 
Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 8) + +== SQL == +select +interval '-1 month 1 day -1 second' +--------^^^ -- !query @@ -147,7 +160,7 @@ select +interval -1 month 1 day -1 second -- !query select interval -'1-1' year to month -- !query schema -struct<-13:year-month interval> +struct -- !query output -1-1 @@ -155,7 +168,7 @@ struct<-13:year-month interval> -- !query select interval -'-1-1' year to month -- !query schema -struct<13:year-month interval> +struct -- !query output 1-1 @@ -163,7 +176,7 @@ struct<13:year-month interval> -- !query select interval +'-1-1' year to month -- !query schema -struct<-13:year-month interval> +struct -- !query output -1-1 @@ -171,7 +184,7 @@ struct<-13:year-month interval> -- !query select interval - '1 2:3:4.001' day to second -- !query schema -struct<-93784001000:day-time interval> +struct -- !query output -1 02:03:04.001000000 @@ -179,7 +192,7 @@ struct<-93784001000:day-time interval> -- !query select interval +'1 2:3:4.001' day to second -- !query schema -struct<93784001000:day-time interval> +struct -- !query output 1 02:03:04.001000000 @@ -187,7 +200,7 @@ struct<93784001000:day-time interval> -- !query select interval -'-1 2:3:4.001' day to second -- !query schema -struct<93784001000:day-time interval> +struct -- !query output 1 02:03:04.001000000 @@ -323,7 +336,7 @@ NULL -- !query select interval 13.123456789 seconds, interval -13.123456789 second -- !query schema -struct<13123456:day-time interval,-13123456:day-time interval> +struct -- !query output 0 00:00:13.123456000 -0 00:00:13.123456000 @@ -359,7 +372,7 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998 -- !query select interval '0 0:0:0.1' day to second -- !query schema -struct<100000:day-time interval> +struct -- !query output 0 00:00:00.100000000 @@ -367,7 +380,7 @@ struct<100000:day-time interval> -- !query select interval '10-9' year to month -- !query schema -struct<129:year-month interval> +struct -- !query output 10-9 @@ -375,7 +388,7 @@ struct<129:year-month interval> -- !query select interval '20 15' day to hour -- !query schema -struct<1782000000000:day-time interval> +struct -- !query output 20 15:00:00.000000000 @@ -383,7 +396,7 @@ struct<1782000000000:day-time interval> -- !query select interval '20 15:40' day to minute -- !query schema -struct<1784400000000:day-time interval> +struct -- !query output 20 15:40:00.000000000 @@ -391,7 +404,7 @@ struct<1784400000000:day-time interval> -- !query select interval '20 15:40:32.99899999' day to second -- !query schema -struct<1784432998999:day-time interval> +struct -- !query output 20 15:40:32.998999000 @@ -399,7 +412,7 @@ struct<1784432998999:day-time interval> -- !query select interval '15:40' hour to minute -- !query schema -struct<56400000000:day-time interval> +struct -- !query output 0 15:40:00.000000000 @@ -407,7 +420,7 @@ struct<56400000000:day-time interval> -- !query select interval '15:40:32.99899999' hour to second -- !query schema -struct<56432998999:day-time interval> +struct -- !query output 0 15:40:32.998999000 @@ -415,7 +428,7 @@ struct<56432998999:day-time interval> -- !query select interval '40:32.99899999' minute to second -- !query schema -struct<2432998999:day-time interval> +struct -- !query output 0 00:40:32.998999000 @@ -423,7 +436,7 @@ struct<2432998999:day-time interval> -- !query select interval '40:32' minute to second -- !query schema -struct<2432000000:day-time interval> +struct -- 
!query output 0 00:40:32.000000000 @@ -545,7 +558,7 @@ select interval 10 nanoseconds -- !query select map(1, interval 1 day, 2, interval 3 week) -- !query schema -struct> +struct> -- !query output {1:1 00:00:00.000000000,2:21 00:00:00.000000000} @@ -553,25 +566,43 @@ struct> -- !query select interval 'interval 3 year 1 hour' -- !query schema -struct +struct<> -- !query output -3 years 1 hours +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval 'interval 3 year 1 hour' +-------^^^ -- !query select interval '3 year 1 hour' -- !query schema -struct +struct<> -- !query output -3 years 1 hours +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +select interval '3 year 1 hour' +-------^^^ -- !query SELECT interval '1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds' -- !query schema -struct +struct<> -- !query output -1 years 3 months 16 days 1 hours 3 minutes 2.1002 seconds +org.apache.spark.sql.catalyst.parser.ParseException + +Mixing of year-month and day-time fields is not allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the legacy interval type which supports mixed fields.(line 1, pos 7) + +== SQL == +SELECT interval '1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds' +-------^^^ -- !query @@ -814,7 +845,7 @@ select interval '2-2' year to month + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 @@ -830,7 +861,7 @@ select interval '2-2' year to month + tsval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 @@ -841,7 +872,7 @@ select interval '2-2' year to month - interval '3-3' year to month from interval_arithmetic -- !query schema -struct<(26 + 39):year-month interval,(26 - 39):year-month interval> +struct<(INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3-3' YEAR TO MONTH):year-month interval,(INTERVAL '2-2' YEAR TO MONTH - INTERVAL '3-3' YEAR TO MONTH):year-month interval> -- !query output 5-5 -1-1 @@ -857,7 +888,7 @@ select interval '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -873,7 +904,7 @@ select interval '99 11:22:33.123456789' day to second + tsval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 00:00:00 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -889,7 +920,7 @@ select interval '99 11:22:33.123456789' day to second + strval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 
12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -900,7 +931,7 @@ select interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second from interval_arithmetic -- !query schema -struct<(8594553123456 + 896887123456):day-time interval,(8594553123456 - 896887123456):day-time interval> +struct<(INTERVAL '99 11:22:33.123456' DAY TO SECOND + INTERVAL '10 09:08:07.123456' DAY TO SECOND):day-time interval,(INTERVAL '99 11:22:33.123456' DAY TO SECOND - INTERVAL '10 09:08:07.123456' DAY TO SECOND):day-time interval> -- !query output 109 20:30:40.246912000 89 02:14:26.000000000 @@ -908,31 +939,31 @@ struct<(8594553123456 + 896887123456):day-time interval,(8594553123456 - 8968871 -- !query select interval '\t interval 1 day' -- !query schema -struct +struct -- !query output -1 days +1 00:00:00.000000000 -- !query select interval 'interval \t 1\tday' -- !query schema -struct +struct -- !query output -1 days +1 00:00:00.000000000 -- !query select interval 'interval\t1\tday' -- !query schema -struct +struct -- !query output -1 days +1 00:00:00.000000000 -- !query select interval '1\t' day -- !query schema -struct<86400000000:day-time interval> +struct -- !query output 1 00:00:00.000000000 @@ -940,7 +971,7 @@ struct<86400000000:day-time interval> -- !query select interval '1 ' day -- !query schema -struct<86400000000:day-time interval> +struct -- !query output 1 00:00:00.000000000 @@ -948,7 +979,7 @@ struct<86400000000:day-time interval> -- !query select interval '2-2\t' year to month -- !query schema -struct<26:year-month interval> +struct -- !query output 2-2 @@ -970,7 +1001,7 @@ select interval '-\t2-2\t' year to month -- !query select interval '\n0 12:34:46.789\t' day to second -- !query schema -struct<45286789000:day-time interval> +struct -- !query output 0 12:34:46.789000000 @@ -1035,41 +1066,46 @@ select interval 'interval 1中文day' -- !query select -(a) from values (interval '-2147483648 months', interval '2147483647 months') t(a, b) -- !query schema -struct<(- a):interval> +struct<> -- !query output --178956970 years -8 months +java.lang.ArithmeticException +integer overflow -- !query select a - b from values (interval '-2147483648 months', interval '2147483647 months') t(a, b) -- !query schema -struct<(a - b):interval> +struct<> -- !query output -1 months +java.lang.ArithmeticException +integer overflow -- !query select b + interval '1 month' from values (interval '-2147483648 months', interval '2147483647 months') t(a, b) -- !query schema -struct<(b + INTERVAL '1 months'):interval> +struct<> -- !query output --178956970 years -8 months +java.lang.ArithmeticException +integer overflow -- !query select a * 1.1 from values (interval '-2147483648 months', interval '2147483647 months') t(a, b) -- !query schema -struct +struct<> -- !query output --178956970 years -8 months +java.lang.ArithmeticException +Overflow -- !query select a / 0.5 from values (interval '-2147483648 months', interval '2147483647 months') t(a, b) -- !query schema -struct +struct<> -- !query output --178956970 years -8 months +java.lang.ArithmeticException +Overflow -- !query @@ -1079,7 +1115,7 @@ SELECT to_csv(named_struct('a', interval 32 month, 'b', interval 70 minute)), from_csv(to_csv(named_struct('a', interval 32 month, 'b', interval 70 minute)), 'a interval, b interval') -- !query schema -struct,to_csv(from_csv(1, 1 day)):string,to_csv(named_struct(a, 32, b, 4200000000)):string,from_csv(to_csv(named_struct(a, 32, b, 4200000000))):struct> +struct,to_csv(from_csv(1, 1 
day)):string,to_csv(named_struct(a, INTERVAL '2-8' YEAR TO MONTH, b, INTERVAL '0 01:10:00' DAY TO SECOND)):string,from_csv(to_csv(named_struct(a, INTERVAL '2-8' YEAR TO MONTH, b, INTERVAL '0 01:10:00' DAY TO SECOND))):struct> -- !query output {"a":1,"b":1 days} 1,1 days 32,4200000000 {"a":null,"b":null} @@ -1235,6 +1271,6 @@ select interval 'interval 1' day -- !query select interval '-\t 1' day -- !query schema -struct<-86400000000:day-time interval> +struct -- !query output -1 00:00:00.000000000 diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out index df12d2607e3c6..b132cc71b3e3c 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out @@ -5,7 +5,7 @@ -- !query SELECT interval '999' second -- !query schema -struct<999000000:day-time interval> +struct -- !query output 0 00:16:39.000000000 @@ -13,7 +13,7 @@ struct<999000000:day-time interval> -- !query SELECT interval '999' minute -- !query schema -struct<59940000000:day-time interval> +struct -- !query output 0 16:39:00.000000000 @@ -21,7 +21,7 @@ struct<59940000000:day-time interval> -- !query SELECT interval '999' hour -- !query schema -struct<3596400000000:day-time interval> +struct -- !query output 41 15:00:00.000000000 @@ -29,7 +29,7 @@ struct<3596400000000:day-time interval> -- !query SELECT interval '999' day -- !query schema -struct<86313600000000:day-time interval> +struct -- !query output 999 00:00:00.000000000 @@ -37,7 +37,7 @@ struct<86313600000000:day-time interval> -- !query SELECT interval '999' month -- !query schema -struct<999:year-month interval> +struct -- !query output 83-3 @@ -45,7 +45,7 @@ struct<999:year-month interval> -- !query SELECT interval '1' year -- !query schema -struct<12:year-month interval> +struct -- !query output 1-0 @@ -53,7 +53,7 @@ struct<12:year-month interval> -- !query SELECT interval '2' month -- !query schema -struct<2:year-month interval> +struct -- !query output 0-2 @@ -61,7 +61,7 @@ struct<2:year-month interval> -- !query SELECT interval '3' day -- !query schema -struct<259200000000:day-time interval> +struct -- !query output 3 00:00:00.000000000 @@ -69,7 +69,7 @@ struct<259200000000:day-time interval> -- !query SELECT interval '4' hour -- !query schema -struct<14400000000:day-time interval> +struct -- !query output 0 04:00:00.000000000 @@ -77,7 +77,7 @@ struct<14400000000:day-time interval> -- !query SELECT interval '5' minute -- !query schema -struct<300000000:day-time interval> +struct -- !query output 0 00:05:00.000000000 @@ -85,7 +85,7 @@ struct<300000000:day-time interval> -- !query SELECT interval '6' second -- !query schema -struct<6000000:day-time interval> +struct -- !query output 0 00:00:06.000000000 @@ -93,7 +93,7 @@ struct<6000000:day-time interval> -- !query SELECT interval '1-2' year to month -- !query schema -struct<14:year-month interval> +struct -- !query output 1-2 @@ -129,7 +129,7 @@ SELECT interval '1 2:03:04' day to hour -- !query SELECT interval '1 2:03' day to minute -- !query schema -struct<93780000000:day-time interval> +struct -- !query output 1 02:03:00.000000000 @@ -165,7 +165,7 @@ SELECT interval '1 2:03' day to second -- !query SELECT interval '1 2:03:04' day to second -- !query schema -struct<93784000000:day-time interval> +struct -- !query output 1 02:03:04.000000000 diff --git 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out index 05f2b491df86f..9ba1f1bcfc972 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out @@ -16,7 +16,7 @@ select cast(1 as tinyint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS TINYINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 -- !query @@ -25,7 +25,7 @@ select cast(1 as smallint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS SMALLINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 -- !query @@ -34,7 +34,7 @@ select cast(1 as int) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS INT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 +cannot resolve 'CAST(1 AS INT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 -- !query @@ -43,7 +43,7 @@ select cast(1 as bigint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS BIGINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 +cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 -- !query @@ -52,7 +52,7 @@ select cast(1 as float) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS FLOAT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 +cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 -- !query @@ -61,7 +61,7 @@ select cast(1 as double) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS DOUBLE) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 +cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -70,13 +70,13 @@ select cast(1 as decimal(10, 0)) + interval 2 day 
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DECIMAL(10,0)) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7

-- !query
select cast('2017-12-11' as string) + interval 2 day
-- !query schema
-struct
+struct
-- !query output
2017-12-13 00:00:00
@@ -84,7 +84,7 @@ struct
-- !query
select cast('2017-12-11 09:30:00' as string) + interval 2 day
-- !query schema
-struct
+struct
-- !query output
2017-12-13 09:30:00
@@ -95,7 +95,7 @@ select cast('1' as binary) + interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST('1' AS BINARY) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7
+cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7

-- !query
@@ -104,13 +104,13 @@ select cast(1 as boolean) + interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BOOLEAN) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7
+cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7

-- !query
select cast('2017-12-11 09:30:00.0' as timestamp) + interval 2 day
-- !query schema
-struct
+struct
-- !query output
2017-12-13 09:30:00
@@ -118,7 +118,7 @@ struct
-- !query
select cast('2017-12-11 09:30:00' as date) + interval 2 day
-- !query schema
-struct
+struct
-- !query output
2017-12-13 00:00:00
@@ -129,7 +129,7 @@ select interval 2 day + cast(1 as tinyint)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS TINYINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7
+cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7

-- !query
@@ -138,7 +138,7 @@ select interval 2 day + cast(1 as smallint)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS SMALLINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7
+cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7

-- !query
@@ -147,7 +147,7 @@ select interval 2 day + cast(1 as int)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS INT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7
+cannot resolve 'CAST(1 AS INT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7

-- !query
@@ -156,7 +156,7 @@ select interval 2 day + cast(1 as bigint)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BIGINT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7
+cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7

-- !query
@@ -165,7 +165,7 @@ select interval 2 day + cast(1 as float)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS FLOAT) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7
+cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7

-- !query
@@ -174,7 +174,7 @@ select interval 2 day + cast(1 as double)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DOUBLE) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7
+cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7

-- !query
@@ -183,13 +183,13 @@ select interval 2 day + cast(1 as decimal(10, 0))
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DECIMAL(10,0)) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7

-- !query
select interval 2 day + cast('2017-12-11' as string)
-- !query schema
-struct
+struct
-- !query output
2017-12-13 00:00:00
@@ -197,7 +197,7 @@ struct
-- !query
select interval 2 day + cast('2017-12-11 09:30:00' as string)
-- !query schema
-struct
+struct
-- !query output
2017-12-13 09:30:00
@@ -208,7 +208,7 @@ select interval 2 day + cast('1' as binary)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST('1' AS BINARY) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7
+cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7

-- !query
@@ -217,13 +217,13 @@ select interval 2 day + cast(1 as boolean)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BOOLEAN) + 172800000000' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7
+cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2 00:00:00' DAY TO SECOND' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7

-- !query
select interval 2 day + cast('2017-12-11 09:30:00.0' as timestamp)
-- !query schema
-struct
+struct
-- !query output
2017-12-13 09:30:00
@@ -231,7 +231,7 @@ struct
-- !query
select interval 2 day + cast('2017-12-11 09:30:00' as date)
-- !query schema
-struct
+struct
-- !query output
2017-12-13 00:00:00
@@ -242,7 +242,7 @@ select cast(1 as tinyint) - interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS TINYINT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7
+cannot resolve 'CAST(1 AS TINYINT) + (- INTERVAL '2 00:00:00' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7

-- !query
@@ -251,7 +251,7 @@ select cast(1 as smallint) - interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS SMALLINT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7
+cannot resolve 'CAST(1 AS SMALLINT) + (- INTERVAL '2 00:00:00' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7

-- !query
@@ -260,7 +260,7 @@ select cast(1 as int) - interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS INT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7
+cannot resolve 'CAST(1 AS INT) + (- INTERVAL '2 00:00:00' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7

-- !query
@@ -269,7 +269,7 @@ select cast(1 as bigint) - interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BIGINT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7
+cannot resolve 'CAST(1 AS BIGINT) + (- INTERVAL '2 00:00:00' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7

-- !query
@@ -278,7 +278,7 @@ select cast(1 as float) - interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS FLOAT) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7
+cannot resolve 'CAST(1 AS FLOAT) + (- INTERVAL '2 00:00:00' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7

-- !query
@@ -287,7 +287,7 @@ select cast(1 as double) - interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DOUBLE) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7
+cannot resolve 'CAST(1 AS DOUBLE) + (- INTERVAL '2 00:00:00' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7

-- !query
@@ -296,13 +296,13 @@ select cast(1 as decimal(10, 0)) - interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2 00:00:00' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7

-- !query
select cast('2017-12-11' as string) - interval 2 day
-- !query schema
-struct
+struct
-- !query output
2017-12-09 00:00:00
@@ -310,7 +310,7 @@ struct
-- !query
select cast('2017-12-11 09:30:00' as string) - interval 2 day
-- !query schema
-struct
+struct
-- !query output
2017-12-09 09:30:00
@@ -321,7 +321,7 @@ select cast('1' as binary) - interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST('1' AS BINARY) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7
+cannot resolve 'CAST('1' AS BINARY) + (- INTERVAL '2 00:00:00' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7

-- !query
@@ -330,13 +330,13 @@ select cast(1 as boolean) - interval 2 day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BOOLEAN) + (- 172800000000)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7
+cannot resolve 'CAST(1 AS BOOLEAN) + (- INTERVAL '2 00:00:00' DAY TO SECOND)' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7

-- !query
select cast('2017-12-11 09:30:00.0' as timestamp) - interval 2 day
-- !query schema
-struct
+struct
-- !query output
2017-12-09 09:30:00
@@ -344,6 +344,6 @@ struct
-- !query
select cast('2017-12-11 09:30:00' as date) - interval 2 day
-- !query schema
-struct
+struct
-- !query output
2017-12-09 00:00:00
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out
index 911e22483b40c..48f953307c4e3 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out
@@ -157,7 +157,7 @@ ORDER BY udf(cate), val_timestamp
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY testdata.val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 2001600000000 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'day-time interval' which is used in the range frame.; line 1 pos 56
+cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY testdata.val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '23 04:00:00' DAY TO SECOND FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'day-time interval' which is used in the range frame.; line 1 pos 56

-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/window.sql.out b/sql/core/src/test/resources/sql-tests/results/window.sql.out
index 7937c77006f61..6eaf137de2678 100644
--- a/sql/core/src/test/resources/sql-tests/results/window.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/window.sql.out
@@ -201,7 +201,7 @@ ORDER BY cate, val_timestamp
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 2001600000000 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'day-time interval' which is used in the range frame.; line 1 pos 51
+cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '23 04:00:00' DAY TO SECOND FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'day-time interval' which is used in the range frame.; line 1 pos 51

-- !query
diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72.sf100/explain.txt
index 1b4bd981ab54e..b5ad67b5853dc 100644
--- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72.sf100/explain.txt
+++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72.sf100/explain.txt
@@ -276,7 +276,7 @@ Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint))
(44) BroadcastHashJoin [codegen id : 10]
Left keys [1]: [cs_sold_date_sk#8]
Right keys [1]: [d_date_sk#23]
-Join condition: (cast(d_date#17 as timestamp) > cast(d_date#24 as timestamp) + 432000000000)
+Join condition: (cast(d_date#17 as timestamp) > cast(d_date#24 as timestamp) + INTERVAL '5 00:00:00' DAY TO SECOND)

(45) Project [codegen id : 10]
Output [7]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, i_item_desc#21, d_week_seq#25, d_date_sk#28]
diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt
index 2cdd9d715749c..c86f77a7488a5 100644
--- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt
+++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt
@@ -313,7 +313,7 @@ Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, false] as bigint)
(53) BroadcastHashJoin [codegen id : 10]
Left keys [1]: [cs_ship_date_sk#1]
Right keys [1]: [d_date_sk#35]
-Join condition: (cast(d_date#36 as timestamp) > cast(d_date#28 as timestamp) + 432000000000)
+Join condition: (cast(d_date#36 as timestamp) > cast(d_date#28 as timestamp) + INTERVAL '5 00:00:00' DAY TO SECOND)

(54) Project [codegen id : 10]
Output [6]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#16, i_item_desc#19, d_week_seq#29]
diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72.sf100/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72.sf100/explain.txt
index 81344efe99d24..f83a0ff0b4547 100644
--- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72.sf100/explain.txt
+++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72.sf100/explain.txt
@@ -276,7 +276,7 @@ Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint))
(44) BroadcastHashJoin [codegen id : 10]
Left keys [1]: [cs_sold_date_sk#8]
Right keys [1]: [d_date_sk#23]
-Join condition: (cast(d_date#17 as timestamp) > cast(d_date#24 as timestamp) + 432000000000)
+Join condition: (cast(d_date#17 as timestamp) > cast(d_date#24 as timestamp) + INTERVAL '5 00:00:00' DAY TO SECOND)

(45) Project [codegen id : 10]
Output [7]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, i_item_desc#21, d_week_seq#25, d_date_sk#28]
diff --git a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt
index 8811e5b9d2949..0e3f911aa6824 100644
--- a/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt
+++ b/sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt
@@ -313,7 +313,7 @@ Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, false] as bigint)
(53) BroadcastHashJoin [codegen id : 10]
Left keys [1]: [cs_ship_date_sk#1]
Right keys [1]: [d_date_sk#35]
-Join condition: (cast(d_date#36 as timestamp) > cast(d_date#28 as timestamp) + 432000000000)
+Join condition: (cast(d_date#36 as timestamp) > cast(d_date#28 as timestamp) + INTERVAL '5 00:00:00' DAY TO SECOND)

(54) Project [codegen id : 10]
Output [6]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#16, i_item_desc#19, d_week_seq#29]

From 7668405ebd902cea698b76c373d0c06b1e759426 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Fri, 16 Apr 2021 14:23:57 +0300
Subject: [PATCH 20/20] Fix other tests

---
 .../sql-tests/results/ansi/literals.sql.out   |  4 +-
 .../sql-tests/results/literals.sql.out        |  4 +-
 .../sql-tests/results/misc-functions.sql.out  |  4 +-
 .../apache/spark/sql/DateFunctionsSuite.scala | 56 ++++++++++---------
 4 files changed, 36 insertions(+), 32 deletions(-)

diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
index df370e4f76a3f..1fb25854a9c12 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
@@ -451,9 +451,9 @@ cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch:
-- !query
select +interval '1 day'
-- !query schema
-struct<(+ INTERVAL '1 days'):interval>
+struct<(+ INTERVAL '1 00:00:00' DAY TO SECOND):day-time interval>
-- !query output
-1 days
+1 00:00:00.000000000

-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
index df370e4f76a3f..1fb25854a9c12 100644
--- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
@@ -451,9 +451,9 @@ cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch:
-- !query
select +interval '1 day'
-- !query schema
-struct<(+ INTERVAL '1 days'):interval>
+struct<(+ INTERVAL '1 00:00:00' DAY TO SECOND):day-time interval>
-- !query output
-1 days
+1 00:00:00.000000000

-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out
index d927e890b0336..80c2e394a7e29 100644
--- a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out
@@ -37,9 +37,9 @@ float double decimal(2,1)
-- !query
select typeof(date '1986-05-23'), typeof(timestamp '1986-05-23'), typeof(interval '23 days')
-- !query schema
-struct
+struct
-- !query output
-date timestamp interval
+date timestamp day-time interval

-- !query
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index d927953677d03..649fba5c0ee73 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -292,35 +292,39 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
  }

  test("time_add") {
-    val t1 = Timestamp.valueOf("2015-07-31 23:59:59")
-    val t2 = Timestamp.valueOf("2015-12-31 00:00:00")
-    val d1 = Date.valueOf("2015-07-31")
-    val d2 = Date.valueOf("2015-12-31")
-    val i = new CalendarInterval(2, 2, 2000000L)
-    val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
-    checkAnswer(
-      df.selectExpr(s"d + INTERVAL'${i.toString}'"),
-      Seq(Row(Date.valueOf("2015-10-02")), Row(Date.valueOf("2016-03-02"))))
-    checkAnswer(
-      df.selectExpr(s"t + INTERVAL'${i.toString}'"),
-      Seq(Row(Timestamp.valueOf("2015-10-03 00:00:01")),
-        Row(Timestamp.valueOf("2016-03-02 00:00:02"))))
+    withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") {
+      val t1 = Timestamp.valueOf("2015-07-31 23:59:59")
+      val t2 = Timestamp.valueOf("2015-12-31 00:00:00")
+      val d1 = Date.valueOf("2015-07-31")
+      val d2 = Date.valueOf("2015-12-31")
+      val i = new CalendarInterval(2, 2, 2000000L)
+      val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
+      checkAnswer(
+        df.selectExpr(s"d + INTERVAL'${i.toString}'"),
+        Seq(Row(Date.valueOf("2015-10-02")), Row(Date.valueOf("2016-03-02"))))
+      checkAnswer(
+        df.selectExpr(s"t + INTERVAL'${i.toString}'"),
+        Seq(Row(Timestamp.valueOf("2015-10-03 00:00:01")),
+          Row(Timestamp.valueOf("2016-03-02 00:00:02"))))
+    }
  }

  test("time_sub") {
-    val t1 = Timestamp.valueOf("2015-10-01 00:00:01")
-    val t2 = Timestamp.valueOf("2016-02-29 00:00:02")
-    val d1 = Date.valueOf("2015-09-30")
-    val d2 = Date.valueOf("2016-02-29")
-    val i = new CalendarInterval(2, 2, 2000000L)
-    val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
-    checkAnswer(
-      df.selectExpr(s"d - INTERVAL'${i.toString}'"),
-      Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-12-26"))))
-    checkAnswer(
-      df.selectExpr(s"t - INTERVAL'${i.toString}'"),
-      Seq(Row(Timestamp.valueOf("2015-07-29 23:59:59")),
-        Row(Timestamp.valueOf("2015-12-27 00:00:00"))))
+    withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") {
+      val t1 = Timestamp.valueOf("2015-10-01 00:00:01")
+      val t2 = Timestamp.valueOf("2016-02-29 00:00:02")
+      val d1 = Date.valueOf("2015-09-30")
+      val d2 = Date.valueOf("2016-02-29")
+      val i = new CalendarInterval(2, 2, 2000000L)
+      val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
+      checkAnswer(
+        df.selectExpr(s"d - INTERVAL'${i.toString}'"),
+        Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-12-26"))))
+      checkAnswer(
+        df.selectExpr(s"t - INTERVAL'${i.toString}'"),
+        Seq(Row(Timestamp.valueOf("2015-07-29 23:59:59")),
+          Row(Timestamp.valueOf("2015-12-27 00:00:00")))))
+    }
  }

  test("function make_interval") {