diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala index 6a085d714ddf9..8ca3f76c3b885 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala @@ -21,10 +21,10 @@ import java.lang.{Boolean => JBoolean} import java.util.IdentityHashMap import scala.collection.mutable +import scala.util.control.NonFatal import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.SQLConfHelper -import org.apache.spark.sql.catalyst.analysis.UnresolvedException import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.rules.RuleId import org.apache.spark.sql.catalyst.rules.UnknownRuleId @@ -56,6 +56,15 @@ abstract class QueryPlan[PlanType <: QueryPlan[PlanType]] def output: Seq[Attribute] + /** + * Returns a string representation of this node with output column information appended, + * including each column's nullability. If `output` has more than `maxColumns` entries, only the + * first `maxColumns` are shown with a count of the remaining ones. + * If we catch a [[NonFatal]] exception, it is highly likely that it was thrown by the call of + * `this.output` ([[UnresolvedException]] when e.g. `dataType` is called on an unresolved + * expression, or [[CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE]] when `Union.output` is called before + * type coercion). In this case, this method falls back to showing just the node name. + */ override def nodeWithOutputColumnsString(maxColumns: Int): String = { try { nodeName + { @@ -75,9 +84,7 @@ abstract class QueryPlan[PlanType <: QueryPlan[PlanType]] } } } catch { - case _: UnresolvedException => - // If we encounter an UnresolvedException, it's high likely that the call of `this.output` - // throws it. In this case, we may have to give up and only show the nodeName. 
+ case NonFatal(_) => nodeName + " " } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala index 74cdee49e55a9..5a78e0519eb15 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala @@ -5088,6 +5088,13 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark checkAnswer(sql(query), Row(1, 2)) } } + + gridTest("SPARK-55811: Catch NonFatal instead of UnresolvedException when calling " + + "nodeWithOutputColumnsString")(Seq("TRACE", "DEBUG", "INFO", "WARN", "ERROR")) { level => + withSQLConf(SQLConf.PLAN_CHANGE_LOG_LEVEL.key -> level) { + checkAnswer(sql("SELECT 1L UNION SELECT 1"), Row(1L)) + } + } } case class Foo(bar: Option[String])