[KYUUBI #6564] Insert into table checks the privilege of the table
# 🔍 Description ## Issue References 🔗 This pull request fixes #6564 ## Describe Your Solution 🔧 Remove the `columnDesc` for `InsertIntoHadoopFsRelationCommand` and `InsertIntoHiveTable` in `table_command_spec.json`. ## Types of changes 🔖 - [ ] Bugfix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [x] Breaking change (fix or feature that would cause existing functionality to change) ## Test Plan 🧪 #### Behavior Without This Pull Request ⚰️ Inserting into a table will check the privileges of the individual columns. #### Behavior With This Pull Request 🎉 Inserting into a table will check the privilege of the table itself. #### Related Unit Tests --- # Checklist 📝 - [ ] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html) **Be nice. Be informative.** Closes #6570 from liujiayi771/insert-permission. Closes #6564 d956aa916 [joey.ljy] Fix ut d282f8ec5 [joey.ljy] insert into table check the privilege of table Authored-by: joey.ljy <joey.ljy@alibaba-inc.com> Signed-off-by: Cheng Pan <chengpan@apache.org>
This commit is contained in:
parent
93285f1fdb
commit
80c8e38066
@ -1628,11 +1628,7 @@
|
||||
"tableDescs" : [ {
|
||||
"fieldName" : "catalogTable",
|
||||
"fieldExtractor" : "CatalogTableOptionTableExtractor",
|
||||
"columnDesc" : {
|
||||
"fieldName" : "outputColumnNames",
|
||||
"fieldExtractor" : "StringSeqColumnExtractor",
|
||||
"comment" : ""
|
||||
},
|
||||
"columnDesc" : null,
|
||||
"actionTypeDesc" : {
|
||||
"fieldName" : "mode",
|
||||
"fieldExtractor" : "SaveModeActionTypeExtractor",
|
||||
@ -1732,11 +1728,7 @@
|
||||
"tableDescs" : [ {
|
||||
"fieldName" : "table",
|
||||
"fieldExtractor" : "CatalogTableTableExtractor",
|
||||
"columnDesc" : {
|
||||
"fieldName" : "outputColumnNames",
|
||||
"fieldExtractor" : "StringSeqColumnExtractor",
|
||||
"comment" : ""
|
||||
},
|
||||
"columnDesc" : null,
|
||||
"actionTypeDesc" : {
|
||||
"fieldName" : "overwrite",
|
||||
"fieldExtractor" : "OverwriteOrInsertActionTypeExtractor",
|
||||
|
||||
@ -1571,7 +1571,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
|
||||
assert(po.catalog.isEmpty)
|
||||
assertEqualsIgnoreCase(defaultDb)(po.dbname)
|
||||
assertEqualsIgnoreCase(tableName)(po.objectName)
|
||||
assert(po.columns === Seq("a", "b"))
|
||||
checkTableOwner(po)
|
||||
val accessType = ranger.AccessType(po, operationType, isInput = false)
|
||||
assert(accessType === AccessType.UPDATE)
|
||||
@ -1658,7 +1657,6 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
|
||||
assert(po.catalog.isEmpty)
|
||||
assertEqualsIgnoreCase(defaultDb)(po.dbname)
|
||||
assertEqualsIgnoreCase(tableName)(po.objectName)
|
||||
assert(po.columns === Seq("a", "b"))
|
||||
checkTableOwner(po)
|
||||
val accessType = ranger.AccessType(po, operationType, isInput = false)
|
||||
assert(accessType === AccessType.UPDATE)
|
||||
|
||||
@ -573,12 +573,10 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
|
||||
val InsertIntoHiveTable = {
|
||||
val cmd = "org.apache.spark.sql.hive.execution.InsertIntoHiveTable"
|
||||
val actionTypeDesc = overwriteActionTypeDesc
|
||||
val columnDesc = ColumnDesc("outputColumnNames", classOf[StringSeqColumnExtractor])
|
||||
val tableDesc = TableDesc(
|
||||
"table",
|
||||
classOf[CatalogTableTableExtractor],
|
||||
Some(columnDesc),
|
||||
Some(actionTypeDesc))
|
||||
actionTypeDesc = Some(actionTypeDesc))
|
||||
val queryDesc = queryQueryDesc
|
||||
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
|
||||
}
|
||||
@ -599,11 +597,9 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
|
||||
val InsertIntoHadoopFsRelationCommand = {
|
||||
val cmd = "org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand"
|
||||
val actionTypeDesc = ActionTypeDesc("mode", classOf[SaveModeActionTypeExtractor])
|
||||
val columnDesc = ColumnDesc("outputColumnNames", classOf[StringSeqColumnExtractor])
|
||||
val tableDesc = TableDesc(
|
||||
"catalogTable",
|
||||
classOf[CatalogTableOptionTableExtractor],
|
||||
Some(columnDesc),
|
||||
actionTypeDesc = Some(actionTypeDesc))
|
||||
val queryDesc = queryQueryDesc
|
||||
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
|
||||
|
||||
@ -644,8 +644,7 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
|
||||
s" [select] privilege on" +
|
||||
s" [$db1/$srcTable1/city,$db1/$srcTable1/id,$db1/$srcTable1/name," +
|
||||
s"$db1/$srcTable2/age,$db1/$srcTable2/id]," +
|
||||
s" [update] privilege on [$db1/$sinkTable1/id,$db1/$sinkTable1/age," +
|
||||
s"$db1/$sinkTable1/name,$db1/$sinkTable1/city]"))
|
||||
s" [update] privilege on [$db1/$sinkTable1]"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Loading…
Reference in New Issue
Block a user