Add getCondition and deprecate getErrorClass in SparkThrowable
MaxGekk committed Sep 22, 2024
1 parent 067f8f1 commit 984e674
Showing 82 changed files with 198 additions and 188 deletions.
@@ -457,7 +457,7 @@ class CollationFactorySuite extends AnyFunSuite with Matchers { // scalastyle:ig
val e = intercept[SparkException] {
fetchCollation(collationName)
}
-assert(e.getErrorClass === "COLLATION_INVALID_NAME")
+assert(e.getCondition === "COLLATION_INVALID_NAME")
assert(e.getMessageParameters.asScala === Map(
"collationName" -> collationName, "proposals" -> proposals))
}
20 changes: 15 additions & 5 deletions common/utils/src/main/java/org/apache/spark/SparkThrowable.java
@@ -35,19 +35,29 @@
*/
@Evolving
public interface SparkThrowable {
-// Succinct, human-readable, unique, and consistent representation of the error category
-// If null, error class is not set
-String getErrorClass();
+/**
+ * Succinct, human-readable, unique, and consistent representation of the error condition.
+ * If null, error condition is not set.
+ */
+String getCondition();
+
+/**
+ * Succinct, human-readable, unique, and consistent representation of the error category.
+ * If null, error class is not set.
+ * @deprecated Use {@link #getCondition()} instead.
+ */
+@Deprecated
+default String getErrorClass() { return getCondition(); }

// Portable error identifier across SQL engines
// If null, error class or SQLSTATE is not set
default String getSqlState() {
-return SparkThrowableHelper.getSqlState(this.getErrorClass());
+return SparkThrowableHelper.getSqlState(this.getCondition());
}

// True if this error is an internal error.
default boolean isInternalError() {
-return SparkThrowableHelper.isInternalError(this.getErrorClass());
+return SparkThrowableHelper.isInternalError(this.getCondition());
}

default Map<String, String> getMessageParameters() {
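The interface change above is the core of the commit: `getCondition` becomes the primary accessor, and the old name survives as a deprecated default method that delegates to it, so existing callers keep compiling. A minimal sketch of how calling code migrates; the helper `conditionOf` is hypothetical, only `getCondition` is Spark API:

```scala
import org.apache.spark.SparkThrowable

// Hypothetical helper illustrating the migration; wraps the possibly-null
// condition in an Option.
def conditionOf(t: Throwable): Option[String] = t match {
  // Preferred after this commit: read the error condition directly.
  case st: SparkThrowable => Option(st.getCondition)
  case _ => None
}

// Legacy callers keep compiling: the deprecated default getErrorClass
// simply delegates to getCondition, so both return the same value.
```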
34 changes: 17 additions & 17 deletions common/utils/src/main/scala/org/apache/spark/SparkException.scala
@@ -69,7 +69,7 @@ class SparkException(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull

override def getQueryContext: Array[QueryContext] = context
}
@@ -179,7 +179,7 @@ private[spark] class SparkUpgradeException private(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull
}

/**
@@ -212,7 +212,7 @@ private[spark] class SparkArithmeticException private(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull
override def getQueryContext: Array[QueryContext] = context
}

@@ -250,7 +250,7 @@ private[spark] class SparkUnsupportedOperationException private(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull
}

private[spark] object SparkUnsupportedOperationException {
@@ -280,7 +280,7 @@ private[spark] class SparkClassNotFoundException(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass
+override def getCondition: String = errorClass
}

/**
@@ -296,7 +296,7 @@ private[spark] class SparkConcurrentModificationException(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass
+override def getCondition: String = errorClass
}

/**
@@ -329,7 +329,7 @@ private[spark] class SparkDateTimeException private(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull
override def getQueryContext: Array[QueryContext] = context
}

@@ -345,7 +345,7 @@ private[spark] class SparkFileNotFoundException(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass
+override def getCondition: String = errorClass
}

/**
@@ -379,7 +379,7 @@ private[spark] class SparkNumberFormatException private(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull
override def getQueryContext: Array[QueryContext] = context
}

@@ -431,7 +431,7 @@ private[spark] class SparkIllegalArgumentException private(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull
override def getQueryContext: Array[QueryContext] = context
}

@@ -460,7 +460,7 @@ private[spark] class SparkRuntimeException private(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull
override def getQueryContext: Array[QueryContext] = context
}

@@ -489,7 +489,7 @@ private[spark] class SparkPythonException private(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull
override def getQueryContext: Array[QueryContext] = context
}

@@ -507,7 +507,7 @@ private[spark] class SparkNoSuchElementException(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass
+override def getCondition: String = errorClass

override def getQueryContext: Array[QueryContext] = context
}
@@ -524,7 +524,7 @@ private[spark] class SparkSecurityException(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass
+override def getCondition: String = errorClass
}

/**
@@ -558,7 +558,7 @@ private[spark] class SparkArrayIndexOutOfBoundsException private(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass.orNull
+override def getCondition: String = errorClass.orNull
override def getQueryContext: Array[QueryContext] = context
}

@@ -574,7 +574,7 @@ private[spark] class SparkSQLException(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass
+override def getCondition: String = errorClass
}

/**
@@ -589,5 +589,5 @@ private[spark] class SparkSQLFeatureNotSupportedException(

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

-override def getErrorClass: String = errorClass
+override def getCondition: String = errorClass
}
@@ -81,7 +81,7 @@ private[spark] object SparkThrowableHelper {
import ErrorMessageFormat._
format match {
case PRETTY => e.getMessage
-case MINIMAL | STANDARD if e.getErrorClass == null =>
+case MINIMAL | STANDARD if e.getCondition == null =>
toJsonString { generator =>
val g = generator.useDefaultPrettyPrinter()
g.writeStartObject()
Expand All @@ -92,7 +92,7 @@ private[spark] object SparkThrowableHelper {
g.writeEndObject()
}
case MINIMAL | STANDARD =>
-val errorClass = e.getErrorClass
+val errorClass = e.getCondition
toJsonString { generator =>
val g = generator.useDefaultPrettyPrinter()
g.writeStartObject()
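The helper hunk above keeps the same dispatch shape after the rename: errors without a condition serialize as a bare message, while errors with one serialize the condition name alongside its parameters. A self-contained sketch of that branching; the field set is illustrative, not the exact MINIMAL/STANDARD schema of SparkThrowableHelper:

```scala
// Simplified stand-in for the format dispatch in SparkThrowableHelper;
// field names are assumptions for illustration only.
def renderSketch(condition: Option[String], message: String, pretty: Boolean): String =
  if (pretty) {
    message                                   // PRETTY: the plain message
  } else condition match {
    case None =>                              // MINIMAL/STANDARD, no condition set
      s"""{"message": "$message"}"""
    case Some(cond) =>                        // MINIMAL/STANDARD with a condition
      s"""{"errorClass": "$cond", "message": "$message"}"""
  }
```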
@@ -84,7 +84,7 @@ class StreamingQueryException private[sql](
s"""${classOf[StreamingQueryException].getName}: ${cause.getMessage}
|$queryDebugString""".stripMargin

-override def getErrorClass: String = errorClass
+override def getCondition: String = errorClass

override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
}
@@ -436,7 +436,7 @@ abstract class AvroLogicalTypeSuite extends QueryTest with SharedSparkSession {
val ex = intercept[SparkException] {
spark.read.format("avro").load(s"$dir.avro").collect()
}
-assert(ex.getErrorClass.startsWith("FAILED_READ_FILE"))
+assert(ex.getCondition.startsWith("FAILED_READ_FILE"))
checkError(
exception = ex.getCause.asInstanceOf[SparkArithmeticException],
condition = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
@@ -891,7 +891,7 @@ abstract class AvroSuite
val ex = intercept[SparkException] {
spark.read.schema("a DECIMAL(4, 3)").format("avro").load(path.toString).collect()
}
-assert(ex.getErrorClass.startsWith("FAILED_READ_FILE"))
+assert(ex.getCondition.startsWith("FAILED_READ_FILE"))
checkError(
exception = ex.getCause.asInstanceOf[AnalysisException],
condition = "AVRO_INCOMPATIBLE_READ_TYPE",
@@ -969,7 +969,7 @@ abstract class AvroSuite
val ex = intercept[SparkException] {
spark.read.schema(s"a $sqlType").format("avro").load(path.toString).collect()
}
-assert(ex.getErrorClass.startsWith("FAILED_READ_FILE"))
+assert(ex.getCondition.startsWith("FAILED_READ_FILE"))
checkError(
exception = ex.getCause.asInstanceOf[AnalysisException],
condition = "AVRO_INCOMPATIBLE_READ_TYPE",
@@ -1006,7 +1006,7 @@ abstract class AvroSuite
val ex = intercept[SparkException] {
spark.read.schema(s"a $sqlType").format("avro").load(path.toString).collect()
}
-assert(ex.getErrorClass.startsWith("FAILED_READ_FILE"))
+assert(ex.getCondition.startsWith("FAILED_READ_FILE"))
checkError(
exception = ex.getCause.asInstanceOf[AnalysisException],
condition = "AVRO_INCOMPATIBLE_READ_TYPE",
@@ -1515,7 +1515,7 @@ abstract class AvroSuite
.write.format("avro").option("avroSchema", avroSchema)
.save(s"$tempDir/${UUID.randomUUID()}")
}
-assert(ex.getErrorClass == "TASK_WRITE_FAILED")
+assert(ex.getCondition == "TASK_WRITE_FAILED")
assert(ex.getCause.isInstanceOf[java.lang.NullPointerException])
assert(ex.getCause.getMessage.contains(
"null value for (non-nullable) string at test_schema.Name"))
@@ -2629,7 +2629,7 @@ abstract class AvroSuite
val e = intercept[SparkException] {
df.write.format("avro").option("avroSchema", avroSchema).save(path3_x)
}
-assert(e.getErrorClass == "TASK_WRITE_FAILED")
+assert(e.getCondition == "TASK_WRITE_FAILED")
assert(e.getCause.isInstanceOf[SparkUpgradeException])
}
checkDefaultLegacyRead(oldPath)
@@ -2884,7 +2884,7 @@ abstract class AvroSuite
val e = intercept[SparkException] {
df.write.format("avro").option("avroSchema", avroSchema).save(dir.getCanonicalPath)
}
-assert(e.getErrorClass == "TASK_WRITE_FAILED")
+assert(e.getCondition == "TASK_WRITE_FAILED")
val errMsg = e.getCause.asInstanceOf[SparkUpgradeException].getMessage
assert(errMsg.contains("You may get a different result due to the upgrading"))
}
@@ -2895,7 +2895,7 @@ abstract class AvroSuite
val e = intercept[SparkException] {
df.write.format("avro").save(dir.getCanonicalPath)
}
-assert(e.getErrorClass == "TASK_WRITE_FAILED")
+assert(e.getCondition == "TASK_WRITE_FAILED")
val errMsg = e.getCause.asInstanceOf[SparkUpgradeException].getMessage
assert(errMsg.contains("You may get a different result due to the upgrading"))
}
@@ -69,7 +69,7 @@ class CatalogSuite extends ConnectFunSuite with RemoteSparkSession with SQLHelpe
val exception = intercept[SparkException] {
spark.catalog.setCurrentCatalog("notExists")
}
-assert(exception.getErrorClass == "CATALOG_NOT_FOUND")
+assert(exception.getCondition == "CATALOG_NOT_FOUND")
spark.catalog.setCurrentCatalog("testcat")
assert(spark.catalog.currentCatalog().equals("testcat"))
val catalogsAfterChange = spark.catalog.listCatalogs().collect()
@@ -251,16 +251,16 @@ class ClientDataFrameStatSuite extends ConnectFunSuite with RemoteSparkSession {
val error1 = intercept[AnalysisException] {
df.stat.bloomFilter("id", -1000, 100)
}
-assert(error1.getErrorClass === "DATATYPE_MISMATCH.VALUE_OUT_OF_RANGE")
+assert(error1.getCondition === "DATATYPE_MISMATCH.VALUE_OUT_OF_RANGE")

val error2 = intercept[AnalysisException] {
df.stat.bloomFilter("id", 1000, -100)
}
-assert(error2.getErrorClass === "DATATYPE_MISMATCH.VALUE_OUT_OF_RANGE")
+assert(error2.getCondition === "DATATYPE_MISMATCH.VALUE_OUT_OF_RANGE")

val error3 = intercept[AnalysisException] {
df.stat.bloomFilter("id", 1000, -1.0)
}
-assert(error3.getErrorClass === "DATATYPE_MISMATCH.VALUE_OUT_OF_RANGE")
+assert(error3.getCondition === "DATATYPE_MISMATCH.VALUE_OUT_OF_RANGE")
}
}