11 changes: 6 additions & 5 deletions common/utils/src/main/resources/error/error-conditions.json
@@ -4373,6 +4373,12 @@
     },
     "sqlState" : "42000"
   },
+  "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED" : {
+    "message" : [
+      "Both '<jdbcTableName>' and '<jdbcQueryString>' can not be specified at the same time."
+    ],
+    "sqlState" : "42613"
+  },
   "JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE" : {
     "message" : [
       "The join condition <joinCondition> has the invalid type <conditionType>, expected \"BOOLEAN\"."
@@ -9122,11 +9128,6 @@
"Unsupported field name: <fieldName>."
]
},
"_LEGACY_ERROR_TEMP_2078" : {
"message" : [
"Both '<jdbcTableName>' and '<jdbcQueryString>' can not be specified at the same time."
]
},
"_LEGACY_ERROR_TEMP_2079" : {
"message" : [
"Option '<jdbcTableName>' or '<jdbcQueryString>' is required."
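Note (not part of the diff): the placeholders `<jdbcTableName>` and `<jdbcQueryString>` in the new JSON template are filled from the `messageParameters` map supplied at the throw site. A rough sketch of the substitution in Scala — the `render` helper is hypothetical; Spark's actual formatting lives in SparkThrowableHelper and also resolves the SQLSTATE:

    // Hypothetical helper for illustration only, not Spark's implementation.
    def render(template: String, params: Map[String, String]): String =
      params.foldLeft(template) { case (msg, (k, v)) => msg.replace(s"<$k>", v) }

    render(
      "Both '<jdbcTableName>' and '<jdbcQueryString>' can not be specified at the same time.",
      Map("jdbcTableName" -> "dbtable", "jdbcQueryString" -> "query"))
    // => Both 'dbtable' and 'query' can not be specified at the same time.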
2 changes: 1 addition & 1 deletion sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1001,7 +1001,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
   def cannotSpecifyBothJdbcTableNameAndQueryError(
       jdbcTableName: String, jdbcQueryString: String): SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
-      errorClass = "_LEGACY_ERROR_TEMP_2078",
+      errorClass = "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED",
       messageParameters = Map(
         "jdbcTableName" -> jdbcTableName,
         "jdbcQueryString" -> jdbcQueryString))
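Note (not part of the diff): this helper is invoked from the JDBC option validation. A minimal sketch of the assumed call-site shape — the real check lives in JDBCOptions.scala and may differ in detail; `JDBC_TABLE_NAME` ("dbtable") and `JDBC_QUERY_STRING` ("query") are the option-key constants also used by the test below:

    import org.apache.spark.sql.errors.QueryExecutionErrors
    import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions

    // Sketch of the validating caller (assumed shape, not verbatim Spark code).
    def validateTableOrQuery(parameters: Map[String, String]): Unit = {
      val tableName = parameters.get(JDBCOptions.JDBC_TABLE_NAME).map(_.trim)
      val queryString = parameters.get(JDBCOptions.JDBC_QUERY_STRING).map(_.trim)
      if (tableName.isDefined && queryString.isDefined) {
        // The option *names* are passed through, which is why the tests expect
        // "jdbcTableName" -> "dbtable" and "jdbcQueryString" -> "query".
        throw QueryExecutionErrors.cannotSpecifyBothJdbcTableNameAndQueryError(
          JDBCOptions.JDBC_TABLE_NAME, JDBCOptions.JDBC_QUERY_STRING)
      }
    }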
53 changes: 31 additions & 22 deletions sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -1727,33 +1727,42 @@ class JDBCSuite extends QueryTest with SharedSparkSession {
test("query JDBC option - negative tests") {
val query = "SELECT * FROM test.people WHERE theid = 1"
// load path
val e1 = intercept[RuntimeException] {
val df = spark.read.format("jdbc")
.option("Url", urlWithUserAndPass)
.option("query", query)
.option("dbtable", "test.people")
.load()
}.getMessage
assert(e1.contains("Both 'dbtable' and 'query' can not be specified at the same time."))
checkError(
exception = intercept[SparkIllegalArgumentException] {
spark.read.format("jdbc")
.option("Url", urlWithUserAndPass)
.option("query", query)
.option("dbtable", "test.people")
.load()
},
condition = "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED",
parameters = Map("jdbcTableName" -> "dbtable", "jdbcQueryString" -> "query")
)

// jdbc api path
val properties = new Properties()
properties.setProperty(JDBCOptions.JDBC_QUERY_STRING, query)
val e2 = intercept[RuntimeException] {
spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", properties).collect()
}.getMessage
assert(e2.contains("Both 'dbtable' and 'query' can not be specified at the same time."))
checkError(
exception = intercept[SparkIllegalArgumentException] {
spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", properties).collect()
},
condition = "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED",
parameters = Map("jdbcTableName" -> "dbtable", "jdbcQueryString" -> "query")
)

val e3 = intercept[RuntimeException] {
sql(
s"""
|CREATE OR REPLACE TEMPORARY VIEW queryOption
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', query '$query', dbtable 'TEST.PEOPLE',
| user 'testUser', password 'testPass')
""".stripMargin.replaceAll("\n", " "))
}.getMessage
assert(e3.contains("Both 'dbtable' and 'query' can not be specified at the same time."))
checkError(
exception = intercept[SparkIllegalArgumentException] {
sql(
s"""
|CREATE OR REPLACE TEMPORARY VIEW queryOption
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', query '$query', dbtable 'TEST.PEOPLE',
| user 'testUser', password 'testPass')
""".stripMargin.replaceAll("\n", " "))
},
condition = "JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED",
parameters = Map("jdbcTableName" -> "dbtable", "jdbcQueryString" -> "query")
)

val e4 = intercept[RuntimeException] {
val df = spark.read.format("jdbc")
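For reference, a minimal repro sketch of how the migrated condition surfaces to users, assuming a SparkSession `spark` and a placeholder H2 URL (validation fails while parsing options, before any connection is attempted):

    // Any JDBC URL behaves the same once both 'dbtable' and 'query' are set.
    spark.read.format("jdbc")
      .option("url", "jdbc:h2:mem:testdb")
      .option("dbtable", "test.people")
      .option("query", "SELECT * FROM test.people")
      .load()
    // Roughly: org.apache.spark.SparkIllegalArgumentException:
    //   [JDBC_TABLE_AND_QUERY_BOTH_SPECIFIED] Both 'dbtable' and 'query' can not be
    //   specified at the same time. SQLSTATE: 42613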