diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json index 1c1f0a9eedf5a..5caa5ac4223ee 100644 --- a/common/utils/src/main/resources/error/error-conditions.json +++ b/common/utils/src/main/resources/error/error-conditions.json @@ -4373,6 +4373,12 @@ }, "sqlState" : "42000" }, + "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED" : { + "message" : [ + "Both '<jdbcTableName>' and '<jdbcQueryString>' can not be specified at the same time." + ], + "sqlState" : "42613" + }, "JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE" : { "message" : [ "The join condition <joinCondition> has the invalid type <conditionType>, expected \"BOOLEAN\"." ] @@ -9122,11 +9128,6 @@ "Unsupported field name: <fieldName>." ] }, - "_LEGACY_ERROR_TEMP_2078" : { - "message" : [ - "Both '<jdbcTableName>' and '<jdbcQueryString>' can not be specified at the same time." - ] - }, "_LEGACY_ERROR_TEMP_2079" : { "message" : [ "Option '<jdbcTableName>' or '<jdbcQueryString>' is required." diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 8985bdb519d19..8a179a9f92aae 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -1001,7 +1001,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE def cannotSpecifyBothJdbcTableNameAndQueryError( jdbcTableName: String, jdbcQueryString: String): SparkIllegalArgumentException = { new SparkIllegalArgumentException( - errorClass = "_LEGACY_ERROR_TEMP_2078", + errorClass = "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED", messageParameters = Map( "jdbcTableName" -> jdbcTableName, "jdbcQueryString" -> jdbcQueryString)) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala index 3268d532a34cd..f2879ccf14934 --- 
a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala @@ -1727,33 +1727,42 @@ class JDBCSuite extends QueryTest with SharedSparkSession { test("query JDBC option - negative tests") { val query = "SELECT * FROM test.people WHERE theid = 1" // load path - val e1 = intercept[RuntimeException] { - val df = spark.read.format("jdbc") - .option("Url", urlWithUserAndPass) - .option("query", query) - .option("dbtable", "test.people") - .load() - }.getMessage - assert(e1.contains("Both 'dbtable' and 'query' can not be specified at the same time.")) + checkError( + exception = intercept[SparkIllegalArgumentException] { + spark.read.format("jdbc") + .option("Url", urlWithUserAndPass) + .option("query", query) + .option("dbtable", "test.people") + .load() + }, + condition = "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED", + parameters = Map("jdbcTableName" -> "dbtable", "jdbcQueryString" -> "query") + ) // jdbc api path val properties = new Properties() properties.setProperty(JDBCOptions.JDBC_QUERY_STRING, query) - val e2 = intercept[RuntimeException] { - spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", properties).collect() - }.getMessage - assert(e2.contains("Both 'dbtable' and 'query' can not be specified at the same time.")) + checkError( + exception = intercept[SparkIllegalArgumentException] { + spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", properties).collect() + }, + condition = "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED", + parameters = Map("jdbcTableName" -> "dbtable", "jdbcQueryString" -> "query") + ) - val e3 = intercept[RuntimeException] { - sql( - s""" - |CREATE OR REPLACE TEMPORARY VIEW queryOption - |USING org.apache.spark.sql.jdbc - |OPTIONS (url '$url', query '$query', dbtable 'TEST.PEOPLE', - | user 'testUser', password 'testPass') - """.stripMargin.replaceAll("\n", " ")) - }.getMessage - assert(e3.contains("Both 'dbtable' and 'query' can not be specified at the same time.")) + 
checkError( + exception = intercept[SparkIllegalArgumentException] { + sql( + s""" + |CREATE OR REPLACE TEMPORARY VIEW queryOption + |USING org.apache.spark.sql.jdbc + |OPTIONS (url '$url', query '$query', dbtable 'TEST.PEOPLE', + | user 'testUser', password 'testPass') + """.stripMargin.replaceAll("\n", " ")) + }, + condition = "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED", + parameters = Map("jdbcTableName" -> "dbtable", "jdbcQueryString" -> "query") + ) val e4 = intercept[RuntimeException] { val df = spark.read.format("jdbc")