Skip to content

Commit 4f567f4

Browse files
committed
[SPARK-39272][SQL] Increase the start position of query context by 1
### What changes were proposed in this pull request?
Increase the start position of query context by 1

### Why are the changes needed?
Currently, the line number starts from 1, while the start position starts from 0. Thus it's better to increase the start position by 1 for consistency.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
UT

Closes #36651 from gengliangwang/increase1. Authored-by: Gengliang Wang <[email protected]> Signed-off-by: Gengliang Wang <[email protected]>
1 parent 9823bb3 commit 4f567f4

22 files changed

+133
-131
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,9 @@ case class Origin(
7979
""
8080
} else {
8181
val positionContext = if (line.isDefined && startPosition.isDefined) {
82-
s"(line ${line.get}, position ${startPosition.get})"
82+
// Note that the line number starts from 1, while the start position starts from 0.
83+
// Here we increase the start position by 1 for consistency.
84+
s"(line ${line.get}, position ${startPosition.get + 1})"
8385
} else {
8486
""
8587
}

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -876,7 +876,7 @@ class TreeNodeSuite extends SparkFunSuite with SQLHelper {
876876
objectType = Some("VIEW"),
877877
objectName = Some("some_view"))
878878
val expected =
879-
"""== SQL of VIEW some_view(line 3, position 38) ==
879+
"""== SQL of VIEW some_view(line 3, position 39) ==
880880
|...7890 + 1234567890 + 1234567890, cast('a'
881881
| ^^^^^^^^
882882
|as /* comment */

sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out

Lines changed: 35 additions & 35 deletions
Large diffs are not rendered by default.

sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,7 @@ struct<>
233233
-- !query output
234234
org.apache.spark.SparkDateTimeException
235235
[CAST_INVALID_INPUT] The value 'xx' of the type "STRING" cannot be cast to "DATE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
236-
== SQL(line 1, position 7) ==
236+
== SQL(line 1, position 8) ==
237237
select next_day("xx", "Mon")
238238
^^^^^^^^^^^^^^^^^^^^^
239239

@@ -328,7 +328,7 @@ struct<>
328328
-- !query output
329329
org.apache.spark.SparkNumberFormatException
330330
[CAST_INVALID_INPUT] The value '1.2' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
331-
== SQL(line 1, position 7) ==
331+
== SQL(line 1, position 8) ==
332332
select date_add('2011-11-11', '1.2')
333333
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
334334

@@ -439,7 +439,7 @@ struct<>
439439
-- !query output
440440
org.apache.spark.SparkNumberFormatException
441441
[CAST_INVALID_INPUT] The value '1.2' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
442-
== SQL(line 1, position 7) ==
442+
== SQL(line 1, position 8) ==
443443
select date_sub(date'2011-11-11', '1.2')
444444
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
445445

sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -251,7 +251,7 @@ struct<>
251251
-- !query output
252252
org.apache.spark.SparkDateTimeException
253253
[CAST_INVALID_INPUT] The value 'Unparseable' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
254-
== SQL(line 1, position 7) ==
254+
== SQL(line 1, position 8) ==
255255
select cast("Unparseable" as timestamp)
256256
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
257257

@@ -263,6 +263,6 @@ struct<>
263263
-- !query output
264264
org.apache.spark.SparkDateTimeException
265265
[CAST_INVALID_INPUT] The value 'Unparseable' of the type "STRING" cannot be cast to "DATE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
266-
== SQL(line 1, position 7) ==
266+
== SQL(line 1, position 8) ==
267267
select cast("Unparseable" as date)
268268
^^^^^^^^^^^^^^^^^^^^^^^^^^^

sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ struct<>
7777
-- !query output
7878
org.apache.spark.SparkArithmeticException
7979
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 10000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
80-
== SQL(line 1, position 7) ==
80+
== SQL(line 1, position 8) ==
8181
select (5e36BD + 0.1) + 5e36BD
8282
^^^^^^^^^^^^^^^^^^^^^^^
8383

@@ -89,7 +89,7 @@ struct<>
8989
-- !query output
9090
org.apache.spark.SparkArithmeticException
9191
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, -11000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
92-
== SQL(line 1, position 7) ==
92+
== SQL(line 1, position 8) ==
9393
select (-4e36BD - 0.1) - 7e36BD
9494
^^^^^^^^^^^^^^^^^^^^^^^^
9595

@@ -101,7 +101,7 @@ struct<>
101101
-- !query output
102102
org.apache.spark.SparkArithmeticException
103103
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 152415787532388367501905199875019052100, 39, 0) cannot be represented as Decimal(38, 2). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
104-
== SQL(line 1, position 7) ==
104+
== SQL(line 1, position 8) ==
105105
select 12345678901234567890.0 * 12345678901234567890.0
106106
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
107107

@@ -113,7 +113,7 @@ struct<>
113113
-- !query output
114114
org.apache.spark.SparkArithmeticException
115115
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1000000000000000000000000000000000000.00000000000000000000000000000000000000, 75, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
116-
== SQL(line 1, position 7) ==
116+
== SQL(line 1, position 8) ==
117117
select 1e35BD / 0.1
118118
^^^^^^^^^^^^
119119

@@ -149,7 +149,7 @@ struct<>
149149
-- !query output
150150
org.apache.spark.SparkArithmeticException
151151
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 10123456789012345678901234567890123456.00000000000000000000000000000000000000, 76, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
152-
== SQL(line 1, position 7) ==
152+
== SQL(line 1, position 8) ==
153153
select 1.0123456789012345678901234567890123456e36BD / 0.1
154154
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
155155

@@ -161,7 +161,7 @@ struct<>
161161
-- !query output
162162
org.apache.spark.SparkArithmeticException
163163
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 101234567890123456789012345678901234.56000000000000000000000000000000000000, 74, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
164-
== SQL(line 1, position 7) ==
164+
== SQL(line 1, position 8) ==
165165
select 1.0123456789012345678901234567890123456e35BD / 1.0
166166
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
167167

@@ -173,7 +173,7 @@ struct<>
173173
-- !query output
174174
org.apache.spark.SparkArithmeticException
175175
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 10123456789012345678901234567890123.45600000000000000000000000000000000000, 73, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
176-
== SQL(line 1, position 7) ==
176+
== SQL(line 1, position 8) ==
177177
select 1.0123456789012345678901234567890123456e34BD / 1.0
178178
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
179179

@@ -185,7 +185,7 @@ struct<>
185185
-- !query output
186186
org.apache.spark.SparkArithmeticException
187187
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1012345678901234567890123456789012.34560000000000000000000000000000000000, 72, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
188-
== SQL(line 1, position 7) ==
188+
== SQL(line 1, position 8) ==
189189
select 1.0123456789012345678901234567890123456e33BD / 1.0
190190
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
191191

@@ -197,7 +197,7 @@ struct<>
197197
-- !query output
198198
org.apache.spark.SparkArithmeticException
199199
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
200-
== SQL(line 1, position 7) ==
200+
== SQL(line 1, position 8) ==
201201
select 1.0123456789012345678901234567890123456e32BD / 1.0
202202
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
203203

@@ -217,7 +217,7 @@ struct<>
217217
-- !query output
218218
org.apache.spark.SparkArithmeticException
219219
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
220-
== SQL(line 1, position 7) ==
220+
== SQL(line 1, position 8) ==
221221
select 1.0123456789012345678901234567890123456e31BD / 0.1
222222
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
223223

sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ struct<>
123123
-- !query output
124124
org.apache.spark.SparkNumberFormatException
125125
[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
126-
== SQL(line 1, position 7) ==
126+
== SQL(line 1, position 8) ==
127127
select interval 2 second * 'a'
128128
^^^^^^^^^^^^^^^^^^^^^^^
129129

@@ -135,7 +135,7 @@ struct<>
135135
-- !query output
136136
org.apache.spark.SparkNumberFormatException
137137
[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
138-
== SQL(line 1, position 7) ==
138+
== SQL(line 1, position 8) ==
139139
select interval 2 second / 'a'
140140
^^^^^^^^^^^^^^^^^^^^^^^
141141

@@ -147,7 +147,7 @@ struct<>
147147
-- !query output
148148
org.apache.spark.SparkNumberFormatException
149149
[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
150-
== SQL(line 1, position 7) ==
150+
== SQL(line 1, position 8) ==
151151
select interval 2 year * 'a'
152152
^^^^^^^^^^^^^^^^^^^^^
153153

@@ -159,7 +159,7 @@ struct<>
159159
-- !query output
160160
org.apache.spark.SparkNumberFormatException
161161
[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
162-
== SQL(line 1, position 7) ==
162+
== SQL(line 1, position 8) ==
163163
select interval 2 year / 'a'
164164
^^^^^^^^^^^^^^^^^^^^^
165165

@@ -187,7 +187,7 @@ struct<>
187187
-- !query output
188188
org.apache.spark.SparkNumberFormatException
189189
[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
190-
== SQL(line 1, position 7) ==
190+
== SQL(line 1, position 8) ==
191191
select 'a' * interval 2 second
192192
^^^^^^^^^^^^^^^^^^^^^^^
193193

@@ -199,7 +199,7 @@ struct<>
199199
-- !query output
200200
org.apache.spark.SparkNumberFormatException
201201
[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
202-
== SQL(line 1, position 7) ==
202+
== SQL(line 1, position 8) ==
203203
select 'a' * interval 2 year
204204
^^^^^^^^^^^^^^^^^^^^^
205205

@@ -229,7 +229,7 @@ struct<>
229229
-- !query output
230230
org.apache.spark.SparkArithmeticException
231231
[DIVIDE_BY_ZERO] Division by zero. To return NULL instead, use `try_divide`. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error.
232-
== SQL(line 1, position 7) ==
232+
== SQL(line 1, position 8) ==
233233
select interval '2 seconds' / 0
234234
^^^^^^^^^^^^^^^^^^^^^^^^
235235

@@ -265,7 +265,7 @@ struct<>
265265
-- !query output
266266
org.apache.spark.SparkArithmeticException
267267
[DIVIDE_BY_ZERO] Division by zero. To return NULL instead, use `try_divide`. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error.
268-
== SQL(line 1, position 7) ==
268+
== SQL(line 1, position 8) ==
269269
select interval '2' year / 0
270270
^^^^^^^^^^^^^^^^^^^^^
271271

@@ -665,7 +665,7 @@ struct<>
665665
-- !query output
666666
org.apache.spark.SparkArithmeticException
667667
[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1234567890123456789, 20, 0) cannot be represented as Decimal(18, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
668-
== SQL(line 1, position 7) ==
668+
== SQL(line 1, position 8) ==
669669
select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789)
670670
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
671671

@@ -1517,7 +1517,7 @@ struct<>
15171517
-- !query output
15181518
org.apache.spark.SparkDateTimeException
15191519
[CAST_INVALID_INPUT] The value '4 11:11' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
1520-
== SQL(line 1, position 7) ==
1520+
== SQL(line 1, position 8) ==
15211521
select '4 11:11' - interval '4 22:12' day to minute
15221522
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
15231523

@@ -1529,7 +1529,7 @@ struct<>
15291529
-- !query output
15301530
org.apache.spark.SparkDateTimeException
15311531
[CAST_INVALID_INPUT] The value '4 12:12:12' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
1532-
== SQL(line 1, position 7) ==
1532+
== SQL(line 1, position 8) ==
15331533
select '4 12:12:12' + interval '4 22:12' day to minute
15341534
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
15351535

@@ -1567,7 +1567,7 @@ struct<>
15671567
-- !query output
15681568
org.apache.spark.SparkDateTimeException
15691569
[CAST_INVALID_INPUT] The value '1' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
1570-
== SQL(line 1, position 7) ==
1570+
== SQL(line 1, position 8) ==
15711571
select str - interval '4 22:12' day to minute from interval_view
15721572
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
15731573

@@ -1579,7 +1579,7 @@ struct<>
15791579
-- !query output
15801580
org.apache.spark.SparkDateTimeException
15811581
[CAST_INVALID_INPUT] The value '1' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
1582-
== SQL(line 1, position 7) ==
1582+
== SQL(line 1, position 8) ==
15831583
select str + interval '4 22:12' day to minute from interval_view
15841584
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
15851585

@@ -2037,7 +2037,7 @@ struct<>
20372037
-- !query output
20382038
org.apache.spark.SparkArithmeticException
20392039
[ARITHMETIC_OVERFLOW] Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to "false" (except for ANSI interval type) to bypass this error.
2040-
== SQL(line 1, position 7) ==
2040+
== SQL(line 1, position 8) ==
20412041
SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1
20422042
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
20432043

@@ -2049,7 +2049,7 @@ struct<>
20492049
-- !query output
20502050
org.apache.spark.SparkArithmeticException
20512051
[ARITHMETIC_OVERFLOW] Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to "false" (except for ANSI interval type) to bypass this error.
2052-
== SQL(line 1, position 7) ==
2052+
== SQL(line 1, position 8) ==
20532053
SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L
20542054
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
20552055

@@ -2095,7 +2095,7 @@ struct<>
20952095
-- !query output
20962096
org.apache.spark.SparkArithmeticException
20972097
[ARITHMETIC_OVERFLOW] Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to "false" (except for ANSI interval type) to bypass this error.
2098-
== SQL(line 1, position 7) ==
2098+
== SQL(line 1, position 8) ==
20992099
SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1
21002100
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
21012101

@@ -2107,7 +2107,7 @@ struct<>
21072107
-- !query output
21082108
org.apache.spark.SparkArithmeticException
21092109
[ARITHMETIC_OVERFLOW] Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to "false" (except for ANSI interval type) to bypass this error.
2110-
== SQL(line 1, position 7) ==
2110+
== SQL(line 1, position 8) ==
21112111
SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L
21122112
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
21132113

sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ struct<>
99
-- !query output
1010
org.apache.spark.SparkNoSuchElementException
1111
[MAP_KEY_DOES_NOT_EXIST] Key 5 does not exist. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
12-
== SQL(line 1, position 7) ==
12+
== SQL(line 1, position 8) ==
1313
select element_at(map(1, 'a', 2, 'b'), 5)
1414
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
1515

@@ -21,7 +21,7 @@ struct<>
2121
-- !query output
2222
org.apache.spark.SparkNoSuchElementException
2323
[MAP_KEY_DOES_NOT_EXIST] Key 5 does not exist. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
24-
== SQL(line 1, position 7) ==
24+
== SQL(line 1, position 8) ==
2525
select map(1, 'a', 2, 'b')[5]
2626
^^^^^^^^^^^^^^^^^^^^^^
2727

@@ -115,7 +115,7 @@ struct<>
115115
-- !query output
116116
org.apache.spark.SparkNoSuchElementException
117117
[MAP_KEY_DOES_NOT_EXIST] Key 5 does not exist. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
118-
== SQL(line 1, position 7) ==
118+
== SQL(line 1, position 8) ==
119119
select element_at(map(1, 'a', 2, 'b'), 5)
120120
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
121121

@@ -127,6 +127,6 @@ struct<>
127127
-- !query output
128128
org.apache.spark.SparkNoSuchElementException
129129
[MAP_KEY_DOES_NOT_EXIST] Key 'c' does not exist. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
130-
== SQL(line 1, position 7) ==
130+
== SQL(line 1, position 8) ==
131131
select element_at(map('a', 1, 'b', 2), 'c')
132132
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

0 commit comments

Comments
 (0)