
Commit bbcc438

beliefer authored and MaxGekk committed
[SPARK-43915][SQL] Assign names to the error class _LEGACY_ERROR_TEMP_[2438-2445]
### What changes were proposed in this pull request?
The PR aims to assign names to the error class _LEGACY_ERROR_TEMP_[2438-2445].

### Why are the changes needed?
Improve the error framework.

### Does this PR introduce _any_ user-facing change?
'No'.

### How was this patch tested?
Existing test cases were updated.

Closes #41553 from beliefer/SPARK-43915.

Authored-by: Jiaan Geng <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
1 parent a4fb7cc commit bbcc438

File tree

19 files changed: +195 -104 lines


core/src/main/resources/error/error-classes.json (+20 -27)

@@ -643,6 +643,11 @@
     ],
     "sqlState" : "23505"
   },
+  "DUPLICATED_METRICS_NAME" : {
+    "message" : [
+      "The metric name is not unique: <metricName>. The same name cannot be used for metrics with different results. However multiple instances of metrics with with same result and name are allowed (e.g. self-joins)."
+    ]
+  },
   "DUPLICATE_CLAUSES" : {
     "message" : [
       "Found duplicate clauses: <clauseName>. Please, remove one of them."
@@ -1237,6 +1242,11 @@
       }
     }
   },
+  "INVALID_NON_DETERMINISTIC_EXPRESSIONS" : {
+    "message" : [
+      "The operator expects a deterministic expression, but the actual expression is <sqlExprs>."
+    ]
+  },
   "INVALID_NUMERIC_LITERAL_RANGE" : {
     "message" : [
       "Numeric literal <rawStrippedQualifier> is outside the valid range for <typeName> with minimum value of <minValue> and maximum value of <maxValue>. Please adjust the value accordingly."
@@ -1512,6 +1522,11 @@
     ],
     "sqlState" : "42604"
   },
+  "INVALID_UDF_IMPLEMENTATION" : {
+    "message" : [
+      "Function <funcName> does not implement ScalarFunction or AggregateFunction."
+    ]
+  },
   "INVALID_URL" : {
     "message" : [
       "The url is invalid: <url>. If necessary set <ansiConfig> to \"false\" to bypass this error."
@@ -2458,6 +2473,11 @@
       "<property> is a reserved namespace property, <msg>."
     ]
   },
+  "SET_OPERATION_ON_MAP_TYPE" : {
+    "message" : [
+      "Cannot have MAP type columns in DataFrame which calls set operations (INTERSECT, EXCEPT, etc.), but the type of column <colName> is <dataType>."
+    ]
+  },
   "SET_PROPERTIES_AND_DBPROPERTIES" : {
     "message" : [
       "set PROPERTIES and DBPROPERTIES at the same time."
@@ -5659,33 +5679,6 @@
       "Conflicting attributes: <conflictingAttributes>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2438" : {
-    "message" : [
-      "Cannot have map type columns in DataFrame which calls set operations(intersect, except, etc.), but the type of column <colName> is <dataType>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2439" : {
-    "message" : [
-      "nondeterministic expressions are only allowed in Project, Filter, Aggregate or Window, found:",
-      "<sqlExprs>",
-      "in operator <operator>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2443" : {
-    "message" : [
-      "Multiple definitions of observed metrics named '<name>': <plan>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2444" : {
-    "message" : [
-      "Function '<funcName>' does not implement ScalarFunction or AggregateFunction."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2445" : {
-    "message" : [
-      "grouping() can only be used with GroupingSets/Cube/Rollup."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2446" : {
     "message" : [
       "Operation not allowed: <cmd> only works on table with location provided: <tableIdentWithDB>"

python/pyspark/sql/tests/test_udtf.py (+6 -2)

@@ -350,7 +350,9 @@ def eval(self, a: int):

         random_udtf = udtf(RandomUDTF, returnType="x: int").asNondeterministic()
         # TODO(SPARK-43966): support non-deterministic UDTFs
-        with self.assertRaisesRegex(AnalysisException, "nondeterministic expressions"):
+        with self.assertRaisesRegex(
+            AnalysisException, "The operator expects a deterministic expression"
+        ):
             random_udtf(lit(1)).collect()

     def test_udtf_with_nondeterministic_input(self):
@@ -362,7 +364,9 @@ def eval(self, a: int):
                 yield a + 1,

         # TODO(SPARK-43966): support non-deterministic UDTFs
-        with self.assertRaisesRegex(AnalysisException, "nondeterministic expressions"):
+        with self.assertRaisesRegex(
+            AnalysisException, " The operator expects a deterministic expression"
+        ):
             TestUDTF(rand(0) * 100).collect()

     def test_udtf_no_eval(self):

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala (+2 -2)

@@ -2326,8 +2326,8 @@ class Analyzer(override val catalogManager: CatalogManager) extends RuleExecutor
             processV2AggregateFunction(aggFunc, arguments, u)
           case _ =>
             failAnalysis(
-              errorClass = "_LEGACY_ERROR_TEMP_2444",
-              messageParameters = Map("funcName" -> bound.name()))
+              errorClass = "INVALID_UDF_IMPLEMENTATION",
+              messageParameters = Map("funcName" -> toSQLId(bound.name())))
         }
     }

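
With toSQLId, the function name in the message parameters is now rendered as a quoted SQL identifier. A minimal sketch of the visible difference (the `testcat`/`ns`/`strlen` registration and the `bad_bound_func` name come from the updated DataSourceV2FunctionSuite further down; an equivalent setup and a SparkSession named `spark` are assumed):

    // Before: Function 'bad_bound_func' does not implement ScalarFunction or AggregateFunction.
    // After:  AnalysisException with errorClass INVALID_UDF_IMPLEMENTATION and funcName = `bad_bound_func`.
    spark.sql("SELECT testcat.ns.strlen('abc')")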

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala (+11 -12)

@@ -302,7 +302,8 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
               "\nReplacement is unresolved: " + e.replacement)

           case g: Grouping =>
-            g.failAnalysis(errorClass = "_LEGACY_ERROR_TEMP_2445", messageParameters = Map.empty)
+            g.failAnalysis(
+              errorClass = "UNSUPPORTED_GROUPING_EXPRESSION", messageParameters = Map.empty)
           case g: GroupingID =>
             g.failAnalysis(
               errorClass = "UNSUPPORTED_GROUPING_EXPRESSION", messageParameters = Map.empty)
@@ -721,10 +722,10 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
         case o if mapColumnInSetOperation(o).isDefined =>
           val mapCol = mapColumnInSetOperation(o).get
           o.failAnalysis(
-            errorClass = "_LEGACY_ERROR_TEMP_2438",
+            errorClass = "UNSUPPORTED_FEATURE.SET_OPERATION_ON_MAP_TYPE",
             messageParameters = Map(
-              "colName" -> mapCol.name,
-              "dataType" -> mapCol.dataType.catalogString))
+              "colName" -> toSQLId(mapCol.name),
+              "dataType" -> toSQLType(mapCol.dataType)))

         case o if o.expressions.exists(!_.deterministic) &&
           !o.isInstanceOf[Project] && !o.isInstanceOf[Filter] &&
@@ -734,10 +735,9 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
           !o.isInstanceOf[LateralJoin] =>
           // The rule above is used to check Aggregate operator.
           o.failAnalysis(
-            errorClass = "_LEGACY_ERROR_TEMP_2439",
-            messageParameters = Map(
-              "sqlExprs" -> o.expressions.map(_.sql).mkString(","),
-              "operator" -> operator.simpleString(SQLConf.get.maxToStringFields)))
+            errorClass = "INVALID_NON_DETERMINISTIC_EXPRESSIONS",
+            messageParameters = Map("sqlExprs" -> o.expressions.map(toSQLExpr(_)).mkString(", "))
+          )

         case _: UnresolvedHint => throw new IllegalStateException(
           "Logical hint operator should be removed during analysis.")
@@ -868,6 +868,7 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
   private def scrubOutIds(string: String): String =
     string.replaceAll("#\\d+", "#x")
       .replaceAll("operator id = \\d+", "operator id = #x")
+      .replaceAll("rand\\(-?\\d+\\)", "rand(number)")

   private def planToString(plan: LogicalPlan): String = {
     if (Utils.isTesting) scrubOutIds(plan.toString) else plan.toString
@@ -1056,10 +1057,8 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
           // of a CTE that is used multiple times or a self join.
           if (!simplifiedMetrics.sameResult(simplifiedOther)) {
             failAnalysis(
-              errorClass = "_LEGACY_ERROR_TEMP_2443",
-              messageParameters = Map(
-                "name" -> name,
-                "plan" -> plan.toString))
+              errorClass = "DUPLICATED_METRICS_NAME",
+              messageParameters = Map("metricName" -> name))
           }
         case None =>
           metricsMap.put(name, metrics)

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala (+19 -9)

@@ -821,25 +821,35 @@ class AnalysisSuite extends AnalysisTest with Matchers {
       CollectMetrics("evt1", count :: Nil, testRelation) :: Nil))

     // Same children, structurally different metrics - fail
-    assertAnalysisError(Union(
-      CollectMetrics("evt1", count :: Nil, testRelation) ::
-      CollectMetrics("evt1", sum :: Nil, testRelation) :: Nil),
-      "Multiple definitions of observed metrics" :: "evt1" :: Nil)
+    assertAnalysisErrorClass(
+      Union(
+        CollectMetrics("evt1", count :: Nil, testRelation) ::
+        CollectMetrics("evt1", sum :: Nil, testRelation) :: Nil),
+      expectedErrorClass = "DUPLICATED_METRICS_NAME",
+      expectedMessageParameters = Map("metricName" -> "evt1")
+    )

     // Different children, same metrics - fail
     val b = $"b".string
     val tblB = LocalRelation(b)
-    assertAnalysisError(Union(
-      CollectMetrics("evt1", count :: Nil, testRelation) ::
-      CollectMetrics("evt1", count :: Nil, tblB) :: Nil),
-      "Multiple definitions of observed metrics" :: "evt1" :: Nil)
+    assertAnalysisErrorClass(
+      Union(
+        CollectMetrics("evt1", count :: Nil, testRelation) ::
+        CollectMetrics("evt1", count :: Nil, tblB) :: Nil),
+      expectedErrorClass = "DUPLICATED_METRICS_NAME",
+      expectedMessageParameters = Map("metricName" -> "evt1")
+    )

     // Subquery different tree - fail
     val subquery = Aggregate(Nil, sum :: Nil, CollectMetrics("evt1", count :: Nil, testRelation))
     val query = Project(
       b :: ScalarSubquery(subquery, Nil).as("sum") :: Nil,
       CollectMetrics("evt1", count :: Nil, tblB))
-    assertAnalysisError(query, "Multiple definitions of observed metrics" :: "evt1" :: Nil)
+    assertAnalysisErrorClass(
+      query,
+      expectedErrorClass = "DUPLICATED_METRICS_NAME",
+      expectedMessageParameters = Map("metricName" -> "evt1")
+    )

     // Aggregate with filter predicate - fail
     val sumWithFilter = sum.transform {

sql/core/src/test/resources/sql-tests/analyzer-results/group-analytics.sql.out (+1 -1)

@@ -332,7 +332,7 @@ SELECT course, year, GROUPING(course) FROM courseSales GROUP BY course, year
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2445",
+  "errorClass" : "UNSUPPORTED_GROUPING_EXPRESSION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",

sql/core/src/test/resources/sql-tests/analyzer-results/join-lateral.sql.out (+2 -2)

@@ -480,7 +480,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY.NON_DETERMINISTIC_LATERAL_SUBQUERIES",
   "sqlState" : "0A000",
   "messageParameters" : {
-    "treeNode" : "LateralJoin lateral-subquery#x [c1#x && c2#x], Inner\n: +- SubqueryAlias __auto_generated_subquery_name\n: +- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(0)) AS c3#x]\n: +- OneRowRelation\n+- SubqueryAlias spark_catalog.default.t1\n +- View (`spark_catalog`.`default`.`t1`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n"
+    "treeNode" : "LateralJoin lateral-subquery#x [c1#x && c2#x], Inner\n: +- SubqueryAlias __auto_generated_subquery_name\n: +- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(number)) AS c3#x]\n: +- OneRowRelation\n+- SubqueryAlias spark_catalog.default.t1\n +- View (`spark_catalog`.`default`.`t1`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -500,7 +500,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY.NON_DETERMINISTIC_LATERAL_SUBQUERIES",
   "sqlState" : "0A000",
   "messageParameters" : {
-    "treeNode" : "LateralJoin lateral-subquery#x [], Inner\n: +- SubqueryAlias __auto_generated_subquery_name\n: +- Project [rand(0) AS rand(0)#x]\n: +- SubqueryAlias spark_catalog.default.t2\n: +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x])\n: +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n: +- LocalRelation [col1#x, col2#x]\n+- SubqueryAlias spark_catalog.default.t1\n +- View (`spark_catalog`.`default`.`t1`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n"
+    "treeNode" : "LateralJoin lateral-subquery#x [], Inner\n: +- SubqueryAlias __auto_generated_subquery_name\n: +- Project [rand(number) AS rand(number)#x]\n: +- SubqueryAlias spark_catalog.default.t2\n: +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x])\n: +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n: +- LocalRelation [col1#x, col2#x]\n+- SubqueryAlias spark_catalog.default.t1\n +- View (`spark_catalog`.`default`.`t1`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n"
   },
   "queryContext" : [ {
     "objectType" : "",

sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-group-analytics.sql.out (+1 -1)

@@ -205,7 +205,7 @@ SELECT course, udf(year), GROUPING(course) FROM courseSales GROUP BY course, udf
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2445",
+  "errorClass" : "UNSUPPORTED_GROUPING_EXPRESSION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",

sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out (+1 -1)

@@ -466,7 +466,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2445",
+  "errorClass" : "UNSUPPORTED_GROUPING_EXPRESSION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",

sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out (+2 -2)

@@ -360,7 +360,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY.NON_DETERMINISTIC_LATERAL_SUBQUERIES",
   "sqlState" : "0A000",
   "messageParameters" : {
-    "treeNode" : "LateralJoin lateral-subquery#x [c1#x && c2#x], Inner\n: +- SubqueryAlias __auto_generated_subquery_name\n: +- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(0)) AS c3#x]\n: +- OneRowRelation\n+- SubqueryAlias spark_catalog.default.t1\n +- View (`spark_catalog`.`default`.`t1`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n"
+    "treeNode" : "LateralJoin lateral-subquery#x [c1#x && c2#x], Inner\n: +- SubqueryAlias __auto_generated_subquery_name\n: +- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(number)) AS c3#x]\n: +- OneRowRelation\n+- SubqueryAlias spark_catalog.default.t1\n +- View (`spark_catalog`.`default`.`t1`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -382,7 +382,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY.NON_DETERMINISTIC_LATERAL_SUBQUERIES",
   "sqlState" : "0A000",
   "messageParameters" : {
-    "treeNode" : "LateralJoin lateral-subquery#x [], Inner\n: +- SubqueryAlias __auto_generated_subquery_name\n: +- Project [rand(0) AS rand(0)#x]\n: +- SubqueryAlias spark_catalog.default.t2\n: +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x])\n: +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n: +- LocalRelation [col1#x, col2#x]\n+- SubqueryAlias spark_catalog.default.t1\n +- View (`spark_catalog`.`default`.`t1`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n"
+    "treeNode" : "LateralJoin lateral-subquery#x [], Inner\n: +- SubqueryAlias __auto_generated_subquery_name\n: +- Project [rand(number) AS rand(number)#x]\n: +- SubqueryAlias spark_catalog.default.t2\n: +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x])\n: +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n: +- LocalRelation [col1#x, col2#x]\n+- SubqueryAlias spark_catalog.default.t1\n +- View (`spark_catalog`.`default`.`t1`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n"
   },
   "queryContext" : [ {
     "objectType" : "",

sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out (+1 -1)

@@ -208,7 +208,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2445",
+  "errorClass" : "UNSUPPORTED_GROUPING_EXPRESSION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",

sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala (+32 -12)

@@ -352,20 +352,40 @@ class DataFrameSetOperationsSuite extends QueryTest with SharedSparkSession {

   test("SPARK-19893: cannot run set operations with map type") {
     val df = spark.range(1).select(map(lit("key"), $"id").as("m"))
-    val e = intercept[AnalysisException](df.intersect(df))
-    assert(e.message.contains(
-      "Cannot have map type columns in DataFrame which calls set operations"))
-    val e2 = intercept[AnalysisException](df.except(df))
-    assert(e2.message.contains(
-      "Cannot have map type columns in DataFrame which calls set operations"))
-    val e3 = intercept[AnalysisException](df.distinct())
-    assert(e3.message.contains(
-      "Cannot have map type columns in DataFrame which calls set operations"))
+    checkError(
+      exception = intercept[AnalysisException](df.intersect(df)),
+      errorClass = "UNSUPPORTED_FEATURE.SET_OPERATION_ON_MAP_TYPE",
+      parameters = Map(
+        "colName" -> "`m`",
+        "dataType" -> "\"MAP<STRING, BIGINT>\"")
+    )
+    checkError(
+      exception = intercept[AnalysisException](df.except(df)),
+      errorClass = "UNSUPPORTED_FEATURE.SET_OPERATION_ON_MAP_TYPE",
+      parameters = Map(
+        "colName" -> "`m`",
+        "dataType" -> "\"MAP<STRING, BIGINT>\"")
+    )
+    checkError(
+      exception = intercept[AnalysisException](df.distinct()),
+      errorClass = "UNSUPPORTED_FEATURE.SET_OPERATION_ON_MAP_TYPE",
+      parameters = Map(
+        "colName" -> "`m`",
+        "dataType" -> "\"MAP<STRING, BIGINT>\"")
+    )
     withTempView("v") {
       df.createOrReplaceTempView("v")
-      val e4 = intercept[AnalysisException](sql("SELECT DISTINCT m FROM v"))
-      assert(e4.message.contains(
-        "Cannot have map type columns in DataFrame which calls set operations"))
+      checkError(
+        exception = intercept[AnalysisException](sql("SELECT DISTINCT m FROM v")),
+        errorClass = "UNSUPPORTED_FEATURE.SET_OPERATION_ON_MAP_TYPE",
+        parameters = Map(
+          "colName" -> "`m`",
+          "dataType" -> "\"MAP<STRING, BIGINT>\""),
+        context = ExpectedContext(
+          fragment = "SELECT DISTINCT m FROM v",
+          start = 0,
+          stop = 23)
+      )
     }
   }


sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala (+11 -2)

@@ -446,8 +446,17 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase {
     catalog("testcat").asInstanceOf[SupportsNamespaces].createNamespace(Array("ns"), emptyProps)
     addFunction(Identifier.of(Array("ns"), "strlen"), StrLen(BadBoundFunction))

-    assert(intercept[AnalysisException](sql("SELECT testcat.ns.strlen('abc')"))
-      .getMessage.contains("does not implement ScalarFunction or AggregateFunction"))
+    checkError(
+      exception = intercept[AnalysisException](
+        sql("SELECT testcat.ns.strlen('abc')")),
+      errorClass = "INVALID_UDF_IMPLEMENTATION",
+      parameters = Map(
+        "funcName" -> "`bad_bound_func`"),
+      context = ExpectedContext(
+        fragment = "testcat.ns.strlen('abc')",
+        start = 7,
+        stop = 30)
+    )
   }

   test("aggregate function: lookup int average") {

sql/core/src/test/scala/org/apache/spark/sql/connector/DeleteFromTableSuiteBase.scala (+1 -14)

@@ -17,7 +17,7 @@

 package org.apache.spark.sql.connector

-import org.apache.spark.sql.{AnalysisException, Row}
+import org.apache.spark.sql.Row
 import org.apache.spark.sql.execution.datasources.v2.{DeleteFromTableExec, ReplaceDataExec, WriteDeltaExec}

 abstract class DeleteFromTableSuiteBase extends RowLevelOperationSuiteBase {
@@ -449,19 +449,6 @@ abstract class DeleteFromTableSuiteBase extends RowLevelOperationSuiteBase {
     }
   }

-  test("delete with nondeterministic conditions") {
-    createAndInitTable("pk INT NOT NULL, id INT, dep STRING",
-      """{ "pk": 1, "id": 1, "dep": "hr" }
-        |{ "pk": 2, "id": 2, "dep": "software" }
-        |{ "pk": 3, "id": 3, "dep": "hr" }
-        |""".stripMargin)
-
-    val e = intercept[AnalysisException] {
-      sql(s"DELETE FROM $tableNameAsString WHERE id <= 1 AND rand() > 0.5")
-    }
-    assert(e.message.contains("nondeterministic expressions are only allowed"))
-  }
-
   test("delete without condition executed as delete with filters") {
     createAndInitTable("pk INT NOT NULL, id INT, dep INT",
       """{ "pk": 1, "id": 1, "dep": 100 }
