[KYUUBI #6640] [AUTHZ] Adapt Derby 10.16 new JDBC driver package name

# 🔍 Description

SPARK-46257 (Spark 4.0.0) moves to Derby 10.16, `org.apache.derby.jdbc.AutoloadedDriver` has been moved to `org.apache.derby.iapi.jdbc.AutoloadedDriver`

## Types of changes 🔖

- [ ] Bugfix (non-breaking change which fixes an issue)
- [x] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)

## Test Plan 🧪

Manually tested with Spark 4.0.

---

# Checklist 📝

- [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)

**Be nice. Be informative.**

Closes #6640 from pan3793/authz-derby.

Closes #6640

46edb32be [Cheng Pan] Update extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala
7eee47f0d [Cheng Pan] Adapt Derby 10.16 new JDBC driver package name

Authored-by: Cheng Pan <chengpan@apache.org>
Signed-off-by: Cheng Pan <chengpan@apache.org>
This commit is contained in:
Cheng Pan 2024-08-23 12:27:48 +08:00
parent d8b83c7c3b
commit d5c31a85a4
No known key found for this signature in database
GPG Key ID: 8001952629BCC75D
5 changed files with 17 additions and 14 deletions

View File

@@ -88,11 +88,19 @@ private[authz] object AuthZUtils {
lazy val isSparkV33OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.3"
lazy val isSparkV34OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.4"
lazy val isSparkV35OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.5"
lazy val isSparkV40OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "4.0"
lazy val SCALA_RUNTIME_VERSION: SemanticVersion =
SemanticVersion(scala.util.Properties.versionNumberString)
lazy val isScalaV213: Boolean = SCALA_RUNTIME_VERSION >= "2.13"
def derbyJdbcDriverClass: String = if (isSparkV40OrGreater) {
// SPARK-46257 (Spark 4.0.0) moves to Derby 10.16
"org.apache.derby.iapi.jdbc.AutoloadedDriver"
} else {
"org.apache.derby.jdbc.AutoloadedDriver"
}
def quoteIfNeeded(part: String): String = {
if (part.matches("[a-zA-Z0-9_]+") && !part.matches("\\d+")) {
part

View File

@@ -24,6 +24,7 @@ import org.scalatest.Outcome
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
import org.apache.kyuubi.plugin.spark.authz.serde._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.AssertionUtils._
class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite {
@@ -41,9 +42,7 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
override def beforeAll(): Unit = {
spark.conf.set(s"spark.sql.catalog.$catalogV2", v2JdbcTableCatalogClassName)
spark.conf.set(s"spark.sql.catalog.$catalogV2.url", jdbcUrl)
spark.conf.set(
s"spark.sql.catalog.$catalogV2.driver",
"org.apache.derby.jdbc.AutoloadedDriver")
spark.conf.set(s"spark.sql.catalog.$catalogV2.driver", derbyJdbcDriverClass)
super.beforeAll()
}

View File

@@ -47,9 +47,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSu
override def beforeAll(): Unit = {
spark.conf.set(s"spark.sql.catalog.$catalogV2", v2JdbcTableCatalogClassName)
spark.conf.set(s"spark.sql.catalog.$catalogV2.url", jdbcUrl)
spark.conf.set(
s"spark.sql.catalog.$catalogV2.driver",
"org.apache.derby.jdbc.AutoloadedDriver")
spark.conf.set(s"spark.sql.catalog.$catalogV2.driver", derbyJdbcDriverClass)
super.beforeAll()

View File

@@ -24,16 +24,15 @@ import org.apache.spark.SparkConf
import org.scalatest.Outcome
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
class DataMaskingForJDBCV2Suite extends DataMaskingTestBase {
override protected val extraSparkConf: SparkConf = {
new SparkConf()
.set("spark.sql.defaultCatalog", "testcat")
.set("spark.sql.catalog.testcat", v2JdbcTableCatalogClassName)
.set(s"spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
.set(
s"spark.sql.catalog.testcat.driver",
"org.apache.derby.jdbc.AutoloadedDriver")
.set("spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
.set("spark.sql.catalog.testcat.driver", derbyJdbcDriverClass)
}
override protected val catalogImpl: String = "in-memory"

View File

@@ -25,16 +25,15 @@ import org.apache.spark.SparkConf
import org.scalatest.Outcome
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
class RowFilteringForJDBCV2Suite extends RowFilteringTestBase {
override protected val extraSparkConf: SparkConf = {
new SparkConf()
.set("spark.sql.defaultCatalog", "testcat")
.set("spark.sql.catalog.testcat", v2JdbcTableCatalogClassName)
.set(s"spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
.set(
s"spark.sql.catalog.testcat.driver",
"org.apache.derby.jdbc.AutoloadedDriver")
.set("spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
.set("spark.sql.catalog.testcat.driver", derbyJdbcDriverClass)
}
override protected val catalogImpl: String = "in-memory"