[KYUUBI #5399] [AUTHZ] Cleanup Spark 3.0 specific implementation

### _Why are the changes needed?_

The cleanup follow-up for #5362, which removed the Spark 3.0 tests for Authz plugin.
Remove the `isSparkV31OrGreater` check and the Spark 3.0 specific implementations in the Authz plugin.

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request

### _Was this patch authored or co-authored using generative AI tooling?_

No.

Closes #5399 from bowenliang123/authz-31greater.

Closes #5399

db4369b13 [Bowen Liang] import
cc38b1c1e [Bowen Liang] fix MASK_SHOW_FIRST_4
cf3ef4e1c [Bowen Liang] remove isSparkV31OrGreater

Authored-by: Bowen Liang <liangbowen@gf.com.cn>
Signed-off-by: Bowen Liang <liangbowen@gf.com.cn>
This commit is contained in:
Bowen Liang 2023-10-12 08:44:02 +08:00
parent e51095edaa
commit 98b74d2ad0
2 changed files with 2 additions and 7 deletions

View File

@ -26,7 +26,6 @@ import org.apache.ranger.plugin.service.RangerBasePlugin
import org.slf4j.LoggerFactory
import org.apache.kyuubi.plugin.spark.authz.AccessControlException
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.plugin.spark.authz.util.RangerConfigProvider
object SparkRangerAdminPlugin extends RangerBasePlugin("spark", "sparkSql")
@ -109,11 +108,8 @@ object SparkRangerAdminPlugin extends RangerBasePlugin("spark", "sparkSql")
} else if (result.getMaskTypeDef != null) {
result.getMaskTypeDef.getName match {
case "MASK" => regexp_replace(col)
case "MASK_SHOW_FIRST_4" if isSparkV31OrGreater =>
regexp_replace(col, hasLen = true)
case "MASK_SHOW_FIRST_4" =>
val right = regexp_replace(s"substr($col, 5)")
s"concat(substr($col, 0, 4), $right)"
regexp_replace(col, hasLen = true)
case "MASK_SHOW_LAST_4" =>
val left = regexp_replace(s"left($col, length($col) - 4)")
s"concat($left, right($col, 4))"

View File

@ -61,7 +61,7 @@ private[authz] object AuthZUtils {
def hasResolvedPermanentView(plan: LogicalPlan): Boolean = {
plan match {
case view: View if view.resolved && isSparkV31OrGreater =>
case view: View if view.resolved =>
!getField[Boolean](view, "isTempView")
case _ =>
false
@ -84,7 +84,6 @@ private[authz] object AuthZUtils {
}
lazy val SPARK_RUNTIME_VERSION: SemanticVersion = SemanticVersion(SPARK_VERSION)
lazy val isSparkV31OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.1"
lazy val isSparkV32OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.2"
lazy val isSparkV33OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.3"