[KYUUBI #4873] [AUTHZ] Refactor Authz reflection with kyuubi-util's DynMethods

### _Why are the changes needed?_

- add reflection utils to kyuubi-util-scala, using kyuubi-util's DynMethods
- continue to provide simplified reflection calls in Scala

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request

Closes #4873 from bowenliang123/authz-reflect.

Closes #4873

d0a508400 [liangbowen] import
95d4760ad [Cheng Pan] Update kyuubi-util-scala/src/main/scala/org/apache/kyuubi/util/reflect/ReflectUtils.scala
83e70f09b [liangbowen] authz reflect

Lead-authored-by: liangbowen <liangbowen@gf.com.cn>
Co-authored-by: Cheng Pan <pan3793@gmail.com>
Signed-off-by: Cheng Pan <chengpan@apache.org>
This commit is contained in:
liangbowen 2023-05-23 15:25:54 +08:00 committed by Cheng Pan
parent aee9b946f3
commit 320178bb68
No known key found for this signature in database
GPG Key ID: 8001952629BCC75D
11 changed files with 69 additions and 68 deletions

View File

@ -28,6 +28,7 @@ import org.apache.kyuubi.plugin.spark.authz.OperationType.OperationType
import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
import org.apache.kyuubi.plugin.spark.authz.serde._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
object PrivilegesBuilder {

View File

@ -27,7 +27,7 @@ import org.apache.ranger.plugin.policyengine.{RangerAccessRequestImpl, RangerPol
import org.apache.kyuubi.plugin.spark.authz.OperationType.OperationType
import org.apache.kyuubi.plugin.spark.authz.ranger.AccessType._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
case class AccessRequest private (accessType: AccessType) extends RangerAccessRequestImpl

View File

@ -26,6 +26,7 @@ import org.apache.spark.sql.execution.command.{RunnableCommand, ShowColumnsComma
import org.apache.kyuubi.plugin.spark.authz.{ObjectType, OperationType}
import org.apache.kyuubi.plugin.spark.authz.util.{AuthZUtils, ObjectFilterPlaceHolder, WithInternalChildren}
import org.apache.kyuubi.util.reflect.ReflectUtils._
class RuleReplaceShowObjectCommands extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = plan match {
@ -48,7 +49,7 @@ class RuleReplaceShowObjectCommands extends Rule[LogicalPlan] {
case class FilteredShowTablesCommand(delegated: RunnableCommand)
extends FilteredShowObjectCommand(delegated) {
var isExtended: Boolean = AuthZUtils.getFieldVal(delegated, "isExtended").asInstanceOf[Boolean]
private val isExtended = getFieldVal[Boolean](delegated, "isExtended")
override protected def isAllowed(r: Row, ugi: UserGroupInformation): Boolean = {
val database = r.getString(0)

View File

@ -34,7 +34,7 @@ import org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor.queryExtractors
import org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor.tableExtractors
import org.apache.kyuubi.plugin.spark.authz.serde.TableType.TableType
import org.apache.kyuubi.plugin.spark.authz.serde.TableTypeExtractor.tableTypeExtractors
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
/**
* A database object(such as database, table, function) descriptor describes its name and getter

View File

@ -17,7 +17,7 @@
package org.apache.kyuubi.plugin.spark.authz.serde
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
trait CatalogExtractor extends (AnyRef => Option[String]) with Extractor

View File

@ -18,6 +18,7 @@
package org.apache.kyuubi.plugin.spark.authz.serde
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
trait DatabaseExtractor extends (AnyRef => Database) with Extractor

View File

@ -27,6 +27,7 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
/**
* A trait for extracting database and table as string tuple

View File

@ -23,8 +23,6 @@ import java.security.interfaces.ECPublicKey
import java.security.spec.X509EncodedKeySpec
import java.util.Base64
import scala.util.{Failure, Success, Try}
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.security.UserGroupInformation
import org.apache.ranger.plugin.service.RangerBasePlugin
@ -34,67 +32,10 @@ import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, View}
import org.apache.kyuubi.plugin.spark.authz.AccessControlException
import org.apache.kyuubi.plugin.spark.authz.util.ReservedKeys._
import org.apache.kyuubi.util.SemanticVersion
import org.apache.kyuubi.util.reflect.ReflectUtils._
private[authz] object AuthZUtils {
/**
* fixme error handling need improve here
*/
/**
 * Reads the value of the declared field `name` from `o` via Java reflection,
 * making the field accessible first.
 *
 * @param o    the target instance to read from
 * @param name the declared field name
 * @tparam T the expected type of the field value (unchecked cast)
 * @return the field value cast to `T`
 * @throws RuntimeException if the field is missing or unreadable; the message
 *                          lists every declared field of `o`'s class
 */
def getFieldVal[T](o: Any, name: String): T =
  Try {
    val field = o.getClass.getDeclaredField(name)
    field.setAccessible(true)
    field.get(o)
  }.fold(
    e => {
      // Surface the available fields so a typo in `name` is easy to spot.
      val declared = o.getClass.getDeclaredFields.map(_.getName).mkString("[", ",", "]")
      throw new RuntimeException(s"$name not in ${o.getClass} $declared", e)
    },
    value => value.asInstanceOf[T])

/**
 * Same as [[getFieldVal]] but yields `None` instead of throwing when the
 * field cannot be read.
 */
def getFieldValOpt[T](o: Any, name: String): Option[T] =
  Try(getFieldVal[T](o, name)) match {
    case Success(value) => Some(value)
    case Failure(_) => None
  }
/**
 * Invokes the method `methodName` on `obj`.
 *
 * @param obj        the receiver instance
 * @param methodName the name of the method to call
 * @param args       (parameter class, argument value) pairs, in declaration order
 * @return the method's result as `AnyRef`
 * @throws RuntimeException if no matching public method exists; the message
 *                          lists every public method of `obj`'s class
 */
def invoke(
    obj: AnyRef,
    methodName: String,
    args: (Class[_], AnyRef)*): AnyRef = {
  val parameterTypes = args.map(_._1)
  val arguments = args.map(_._2)
  try {
    val method = obj.getClass.getMethod(methodName, parameterTypes: _*)
    method.setAccessible(true)
    method.invoke(obj, arguments: _*)
  } catch {
    case e: NoSuchMethodException =>
      // List the candidate methods so the caller can diagnose the lookup failure.
      val available = obj.getClass.getMethods.map(_.getName).mkString("[", ",", "]")
      throw new RuntimeException(s"$methodName not in ${obj.getClass} $available", e)
  }
}

/**
 * Same as [[invoke]] but casts the result to `T` (unchecked).
 */
def invokeAs[T](
    obj: AnyRef,
    methodName: String,
    args: (Class[_], AnyRef)*): T =
  invoke(obj, methodName, args: _*).asInstanceOf[T]
/**
 * Invokes the static method `methodName` on class `obj`.
 *
 * Unlike [[invoke]], lookup failures are not wrapped: a missing method
 * propagates as the raw `NoSuchMethodException`.
 *
 * @param obj        the class declaring the static method
 * @param methodName the name of the static method to call
 * @param args       (parameter class, argument value) pairs, in declaration order
 * @return the method's result as `AnyRef`
 */
def invokeStatic(
    obj: Class[_],
    methodName: String,
    args: (Class[_], AnyRef)*): AnyRef = {
  val parameterTypes = args.map(_._1)
  val arguments = args.map(_._2)
  val method = obj.getMethod(methodName, parameterTypes: _*)
  method.setAccessible(true)
  // For static methods the receiver is ignored; the class object is passed
  // here to mirror the instance-method call shape.
  method.invoke(obj, arguments: _*)
}

/**
 * Same as [[invokeStatic]] but casts the result to `T` (unchecked).
 */
def invokeStaticAs[T](
    obj: Class[_],
    methodName: String,
    args: (Class[_], AnyRef)*): T =
  invokeStatic(obj, methodName, args: _*).asInstanceOf[T]
/**
* Get the active session user
* @param spark spark context instance

View File

@ -20,6 +20,7 @@ package org.apache.kyuubi.plugin.spark.authz.util
import org.apache.hadoop.conf.Configuration
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.reflect.DynMethods
trait RangerConfigProvider {
@ -36,12 +37,16 @@ trait RangerConfigProvider {
// NOTE(review): this span is a rendered diff hunk with no +/- markers, so it
// shows BOTH the old reflection-helper calls and their DynMethods replacements
// back to back. In the merged source only the DynMethods form should remain —
// confirm against the actual file before treating this as runnable code.
def getRangerConf: Configuration = {
if (isRanger21orGreater) {
// for Ranger 2.1+
// presumably the line removed by this commit (old invokeAs helper):
invokeAs[Configuration](this, "getConfig")
// replacement: reflectively call RangerBasePlugin#getConfig
DynMethods.builder("getConfig")
.impl("org.apache.ranger.plugin.service.RangerBasePlugin")
.build()
.invoke[Configuration](this)
} else {
// for Ranger 2.0 and below
// presumably the lines removed by this commit (old static helper):
invokeStaticAs[Configuration](
Class.forName("org.apache.ranger.authorization.hadoop.config.RangerConfiguration"),
"getInstance")
// replacement: reflectively call static RangerConfiguration.getInstance()
DynMethods.builder("getInstance")
.impl("org.apache.ranger.authorization.hadoop.config.RangerConfiguration")
.buildStatic()
.invoke[Configuration]()
}
}
}

View File

@ -35,6 +35,7 @@ import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
import org.apache.kyuubi.plugin.spark.authz.ranger.RuleAuthorization.KYUUBI_AUTHZ_TAG
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
abstract class RangerSparkExtensionSuite extends AnyFunSuite
with SparkSessionProvider with BeforeAndAfterAll {

View File

@ -0,0 +1,50 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kyuubi.util.reflect
import scala.util.{Failure, Success, Try}
/**
 * Scala-friendly reflection helpers built on top of kyuubi-util's
 * DynFields/DynMethods.
 */
object ReflectUtils {

  /**
   * Reads the value of the (possibly non-public) field `fieldName` from `target`.
   *
   * @throws RuntimeException if the field cannot be resolved; the message lists
   *                          every declared field of `target`'s class
   */
  def getFieldVal[T](target: Any, fieldName: String): T =
    Try(DynFields.builder().hiddenImpl(target.getClass, fieldName).build[T]().get(target))
      .fold(
        e => {
          // Surface the available fields so the failure is easy to diagnose.
          val declared = target.getClass.getDeclaredFields.map(_.getName).mkString("[", ",", "]")
          throw new RuntimeException(s"$fieldName not in ${target.getClass} $declared", e)
        },
        identity)

  /** Same as [[getFieldVal]] but yields `None` on any failure. */
  def getFieldValOpt[T](target: Any, name: String): Option[T] =
    Try(getFieldVal[T](target, name)) match {
      case Success(value) => Some(value)
      case Failure(_) => None
    }

  /**
   * Invokes the (possibly non-public) method `methodName` on `target`.
   *
   * NOTE(review): assumes the DynMethods builder surfaces a
   * NoSuchMethodException when no implementation matches — confirm, otherwise
   * the catch below never fires.
   */
  def invoke(target: AnyRef, methodName: String, args: (Class[_], AnyRef)*): AnyRef = {
    val parameterTypes = args.map(_._1)
    val arguments = args.map(_._2)
    try {
      DynMethods.builder(methodName)
        .hiddenImpl(target.getClass, parameterTypes: _*)
        .build()
        .invoke(target, arguments: _*)
    } catch {
      case e: NoSuchMethodException =>
        // List the candidate methods so a typo in `methodName` is easy to spot.
        val available = target.getClass.getMethods.map(_.getName).mkString("[", ",", "]")
        throw new RuntimeException(s"$methodName not in ${target.getClass} $available", e)
    }
  }

  /** Same as [[invoke]] but casts the result to `T` (unchecked). */
  def invokeAs[T](target: AnyRef, methodName: String, args: (Class[_], AnyRef)*): T =
    invoke(target, methodName, args: _*).asInstanceOf[T]
}