Commit b6eadf0

itholic authored and MaxGekk committed
[SPARK-42302][SQL] Assign name to _LEGACY_ERROR_TEMP_2135
### What changes were proposed in this pull request?

This PR proposes to assign the name `EMPTY_JSON_FIELD_VALUE` to the error class `_LEGACY_ERROR_TEMP_2135`.

### Why are the changes needed?

We should assign proper names to all `_LEGACY_ERROR_TEMP_*` error classes.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`

Closes #39872 from itholic/LEGACY_2135.

Lead-authored-by: itholic <[email protected]>
Co-authored-by: Haejoon Lee <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
1 parent 17e3ee0 · commit b6eadf0

File tree: 4 files changed, +18 −16 lines changed


core/src/main/resources/error/error-classes.json

Lines changed: 6 additions & 5 deletions
@@ -506,6 +506,12 @@
     ],
     "sqlState" : "22003"
   },
+  "EMPTY_JSON_FIELD_VALUE" : {
+    "message" : [
+      "Failed to parse an empty string for data type <dataType>."
+    ],
+    "sqlState" : "42604"
+  },
   "ENCODER_NOT_FOUND" : {
     "message" : [
       "Not found an encoder of the type <typeName> to Spark SQL internal representation. Consider to change the input type to one of supported at https://spark.apache.org/docs/latest/sql-ref-datatypes.html."
@@ -4237,11 +4243,6 @@
       "Cannot parse field value <value> for pattern <pattern> as target spark data type [<dataType>]."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2135" : {
-    "message" : [
-      "Failed to parse an empty string for data type <dataType>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2138" : {
     "message" : [
       "Cannot have circular references in bean class, but got the circular reference of class <clazz>."
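
For illustration, a minimal repro sketch of the renamed error class (this example is not part of the commit; it assumes a local Spark session on a build that includes this patch, and mirrors the schema, input, and FAILFAST option used in the test change below):

```scala
// Minimal sketch: trigger EMPTY_JSON_FIELD_VALUE from the JSON reader.
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").getOrCreate()
import spark.implicits._

val df = spark.read
  .schema("a double")         // an empty string cannot be parsed as DOUBLE
  .option("mode", "FAILFAST") // fail instead of nulling out the record
  .json(Seq("""{"a":""}""").toDS)

// Throws a SparkException whose root cause is a SparkRuntimeException with
// errorClass = "EMPTY_JSON_FIELD_VALUE":
//   Failed to parse an empty string for data type "DOUBLE".
df.collect()
```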

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala

Lines changed: 2 additions & 2 deletions
@@ -420,12 +420,12 @@ class JacksonParser(
       case VALUE_STRING if parser.getTextLength < 1 && allowEmptyString =>
         dataType match {
           case FloatType | DoubleType | TimestampType | DateType =>
-            throw QueryExecutionErrors.failToParseEmptyStringForDataTypeError(dataType)
+            throw QueryExecutionErrors.emptyJsonFieldValueError(dataType)
           case _ => null
         }
 
       case VALUE_STRING if parser.getTextLength < 1 =>
-        throw QueryExecutionErrors.failToParseEmptyStringForDataTypeError(dataType)
+        throw QueryExecutionErrors.emptyJsonFieldValueError(dataType)
 
       case token =>
         // We cannot parse this token based on the given data type. So, we throw a
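
As context for the two branches above: only the first arm consults `allowEmptyString`, which is assumed here to be fed by the legacy conf `spark.sql.legacy.json.allowEmptyString.enabled`. With that flag on, an empty string still fails for the four listed types but degrades to `null` for everything else. A hedged sketch, continuing the session from the previous example:

```scala
// Assumption: allowEmptyString is backed by the legacy conf below.
spark.conf.set("spark.sql.legacy.json.allowEmptyString.enabled", true)

// Non-special type: "" hits the `case _ => null` arm, so even FAILFAST
// succeeds and yields a null field.
spark.read.schema("a int").option("mode", "FAILFAST")
  .json(Seq("""{"a":""}""").toDS).collect()   // Array(Row(null))

// Special-cased type: "" still throws emptyJsonFieldValueError.
spark.read.schema("a date").option("mode", "FAILFAST")
  .json(Seq("""{"a":""}""").toDS).collect()   // EMPTY_JSON_FIELD_VALUE
```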

sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala

Lines changed: 3 additions & 4 deletions
@@ -1431,11 +1431,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
         "dataType" -> dataType.toString()))
   }
 
-  def failToParseEmptyStringForDataTypeError(dataType: DataType): SparkRuntimeException = {
+  def emptyJsonFieldValueError(dataType: DataType): SparkRuntimeException = {
     new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2135",
-      messageParameters = Map(
-        "dataType" -> dataType.catalogString))
+      errorClass = "EMPTY_JSON_FIELD_VALUE",
+      messageParameters = Map("dataType" -> toSQLType(dataType)))
   }
 
   def cannotParseJSONFieldError(parser: JsonParser, jsonType: JsonToken, dataType: DataType)
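
Note the user-visible change in the message parameter: the legacy path interpolated the raw catalog string, while `toSQLType` renders the quoted SQL type name. A rough sketch of the two renderings, under the assumption that `toSQLType` quotes the upper-case SQL name of the type:

```scala
import org.apache.spark.sql.types.{DataType, DoubleType}

// What _LEGACY_ERROR_TEMP_2135 interpolated into the message:
def legacyRendering(dt: DataType): String = dt.catalogString

// Approximation of the new rendering; toSQLType is assumed to wrap the
// upper-case SQL type name in double quotes.
def newRendering(dt: DataType): String = "\"" + dt.sql + "\""

assert(legacyRendering(DoubleType) == "double")  // old: ...data type double.
assert(newRendering(DoubleType) == "\"DOUBLE\"") // new: ...data type "DOUBLE".
```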

sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala

Lines changed: 7 additions & 5 deletions
@@ -35,6 +35,7 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.{functions => F, _}
 import org.apache.spark.sql.catalyst.json._
 import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils}
+import org.apache.spark.sql.catalyst.util.TypeUtils.toSQLType
 import org.apache.spark.sql.execution.ExternalRDD
 import org.apache.spark.sql.execution.datasources.{CommonFileDataSourceSuite, DataSource, InMemoryFileIndex, NoopCache}
 import org.apache.spark.sql.execution.datasources.v2.json.JsonScanBuilder
@@ -2608,11 +2609,12 @@ abstract class JsonSuite
   private def failedOnEmptyString(dataType: DataType): Unit = {
     val df = spark.read.schema(s"a ${dataType.catalogString}")
       .option("mode", "FAILFAST").json(Seq("""{"a":""}""").toDS)
-    val errMessage = intercept[SparkException] {
-      df.collect()
-    }.getMessage
-    assert(errMessage.contains(
-      s"Failed to parse an empty string for data type ${dataType.catalogString}"))
+    val e = intercept[SparkException] {df.collect()}
+    checkError(
+      exception = e.getCause.getCause.getCause.asInstanceOf[SparkRuntimeException],
+      errorClass = "EMPTY_JSON_FIELD_VALUE",
+      parameters = Map("dataType" -> toSQLType(dataType))
+    )
   }
 
   private def emptyString(dataType: DataType, expected: Any): Unit = {
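
The deep `getCause` chain appears to reflect how FAILFAST surfaces the failure: the collect-time `SparkException` wraps the task failure, which wraps a malformed-record wrapper, which wraps the actual `SparkRuntimeException`. The suite's call sites for `failedOnEmptyString` live outside this hunk; a hedged sketch of typical usage (assumed, not part of this diff):

```scala
// Assumed usage: the types special-cased in JacksonParser should all fail
// on an empty JSON string.
Seq(FloatType, DoubleType, TimestampType, DateType).foreach(failedOnEmptyString)
```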
