[KYUUBI #5271][Bug] AnalyzeTableCommand should also add table write privilege

### _Why are the changes needed?_
Since AnalyzeTableCommand also updates the table's metadata, and alter commands require table write privilege, AnalyzeTableCommand should require it as well.

### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request

### _Was this patch authored or co-authored using generative AI tooling?_

Closes #5272 from AngersZhuuuu/KYUUBI-5271.

Closes #5271

ad5c70403 [Angerszhuuuu] Merge branch 'KYUUBI-5271' of https://github.com/AngersZhuuuu/incubator-kyuubi into KYUUBI-5271
a5932b7e8 [Angerszhuuuu] Update TableCommands.scala
97ee3299c [Angerszhuuuu] Merge branch 'master' into KYUUBI-5271
cdd8100fd [Angerszhuuuu] Update PrivilegesBuilderSuite.scala
5f110563e [Angerszhuuuu] update
92c30454b [Angerszhuuuu] Revert "Update TableCommands.scala"
504ff2a26 [Angerszhuuuu] Update TableCommands.scala
6c4233680 [Angerszhuuuu] [KYUUBI #5271][Bug] AnalyzeTableCommand should also add table write privilege

Authored-by: Angerszhuuuu <angers.zhu@gmail.com>
Signed-off-by: Kent Yao <yao@apache.org>
This commit is contained in:
Angerszhuuuu 2023-09-13 19:49:57 +08:00 committed by Kent Yao
parent d15322d568
commit b21ae88545
No known key found for this signature in database
GPG Key ID: F7051850A0AF904D
3 changed files with 78 additions and 17 deletions

View File

@ -865,6 +865,15 @@
}, {
"classname" : "org.apache.spark.sql.execution.command.AnalyzeColumnCommand",
"tableDescs" : [ {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : null,
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
"setCurrentDatabaseIfMissing" : false
}, {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
@ -889,11 +898,20 @@
"isInput" : true,
"setCurrentDatabaseIfMissing" : false
} ],
"opType" : "ANALYZE_TABLE",
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AnalyzePartitionCommand",
"tableDescs" : [ {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : null,
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
"setCurrentDatabaseIfMissing" : false
}, {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
@ -906,11 +924,20 @@
"isInput" : true,
"setCurrentDatabaseIfMissing" : false
} ],
"opType" : "ANALYZE_TABLE",
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AnalyzeTableCommand",
"tableDescs" : [ {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : null,
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
"setCurrentDatabaseIfMissing" : false
}, {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : null,
@ -920,7 +947,7 @@
"isInput" : true,
"setCurrentDatabaseIfMissing" : false
} ],
"opType" : "ANALYZE_TABLE",
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ]
}, {
"classname" : "org.apache.spark.sql.execution.command.CacheTableCommand",

View File

@ -379,7 +379,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
val plan = sql(s"ANALYZE TABLE $reusedPartTable PARTITION (pid=1)" +
s" COMPUTE STATISTICS FOR COLUMNS key").queryExecution.analyzed
val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
assert(operationType === ANALYZE_TABLE)
assert(operationType === ALTERTABLE_PROPERTIES)
assert(in.size === 1)
val po0 = in.head
assert(po0.actionType === PrivilegeObjectActionType.OTHER)
@ -390,16 +390,27 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
assert(po0.columns === Seq("key"))
checkTableOwner(po0)
val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
assert(accessType0 === AccessType.SELECT)
assert(accessType0 === AccessType.ALTER)
assert(out.size === 1)
val po1 = out.head
assert(po1.actionType === PrivilegeObjectActionType.OTHER)
assert(po1.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assertEqualsIgnoreCase(reusedDb)(po1.dbname)
assertEqualsIgnoreCase(reusedPartTableShort)(po1.objectName)
// ignore this check as it behaves differently across spark versions
assert(po1.columns.isEmpty)
checkTableOwner(po1)
val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
assert(accessType1 === AccessType.ALTER)
assert(out.size === 0)
}
test("AnalyzePartitionCommand") {
val plan = sql(s"ANALYZE TABLE $reusedPartTable" +
s" PARTITION (pid = 1) COMPUTE STATISTICS").queryExecution.analyzed
val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
assert(operationType === ANALYZE_TABLE)
assert(operationType === ALTERTABLE_PROPERTIES)
assert(in.size === 1)
val po0 = in.head
@ -411,9 +422,19 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
assert(po0.columns === Seq("pid"))
checkTableOwner(po0)
val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
assert(accessType0 === AccessType.SELECT)
assert(accessType0 === AccessType.ALTER)
assert(out.size === 0)
assert(out.size === 1)
val po1 = out.head
assert(po1.actionType === PrivilegeObjectActionType.OTHER)
assert(po1.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assertEqualsIgnoreCase(reusedDb)(po1.dbname)
assertEqualsIgnoreCase(reusedPartTableShort)(po1.objectName)
// ignore this check as it behaves differently across spark versions
assert(po1.columns.isEmpty)
checkTableOwner(po1)
val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
assert(accessType1 === AccessType.ALTER)
}
test("AnalyzeTableCommand") {
@ -421,7 +442,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
.queryExecution.analyzed
val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
assert(operationType === ANALYZE_TABLE)
assert(operationType === ALTERTABLE_PROPERTIES)
assert(in.size === 1)
val po0 = in.head
assert(po0.actionType === PrivilegeObjectActionType.OTHER)
@ -432,9 +453,19 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
assert(po0.columns.isEmpty)
checkTableOwner(po0)
val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
assert(accessType0 === AccessType.SELECT)
assert(accessType0 === AccessType.ALTER)
assert(out.size === 0)
assert(out.size === 1)
val po1 = out.head
assert(po1.actionType === PrivilegeObjectActionType.OTHER)
assert(po1.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
assertEqualsIgnoreCase(reusedDb)(po1.dbname)
assertEqualsIgnoreCase(reusedPartTableShort)(po1.objectName)
// ignore this check as it behaves differently across spark versions
assert(po1.columns.isEmpty)
checkTableOwner(po1)
val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
assert(accessType1 === AccessType.ALTER)
}
test("AnalyzeTablesCommand") {

View File

@ -181,7 +181,8 @@ object TableCommands {
val cd2 = cd1.copy(fieldExtractor = classOf[StringSeqOptionColumnExtractor])
val td1 = tableIdentDesc.copy(columnDesc = Some(cd1), isInput = true)
val td2 = td1.copy(columnDesc = Some(cd2))
TableCommandSpec(cmd, Seq(td1, td2), ANALYZE_TABLE)
// AnalyzeColumn will update table properties, here we use ALTERTABLE_PROPERTIES
TableCommandSpec(cmd, Seq(tableIdentDesc, td1, td2), ALTERTABLE_PROPERTIES)
}
val AnalyzePartition = {
@ -189,16 +190,18 @@ object TableCommands {
val columnDesc = ColumnDesc("partitionSpec", classOf[PartitionColumnExtractor])
TableCommandSpec(
cmd,
Seq(tableIdentDesc.copy(columnDesc = Some(columnDesc), isInput = true)),
ANALYZE_TABLE)
// AnalyzePartition will update table properties, here we use ALTERTABLE_PROPERTIES
Seq(tableIdentDesc, tableIdentDesc.copy(columnDesc = Some(columnDesc), isInput = true)),
ALTERTABLE_PROPERTIES)
}
val AnalyzeTable = {
val cmd = "org.apache.spark.sql.execution.command.AnalyzeTableCommand"
TableCommandSpec(
cmd,
Seq(tableIdentDesc.copy(isInput = true)),
ANALYZE_TABLE)
// AnalyzeTable will update table properties, here we use ALTERTABLE_PROPERTIES
Seq(tableIdentDesc, tableIdentDesc.copy(isInput = true)),
ALTERTABLE_PROPERTIES)
}
val CreateTableV2 = {