diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index c3cc53736..f590ea267 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -60,17 +60,17 @@ jobs: - java: 8 spark: '3.4' spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.3 -Dspark.archive.name=spark-3.1.3-bin-hadoop3.2.tgz -Pzookeeper-3.6' - exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.SparkLocalClusterTest' + exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest' comment: 'verify-on-spark-3.1-binary' - java: 8 spark: '3.4' spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.2.4 -Dspark.archive.name=spark-3.2.4-bin-hadoop3.2.tgz -Pzookeeper-3.6' - exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.SparkLocalClusterTest' + exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest' comment: 'verify-on-spark-3.2-binary' - java: 8 spark: '3.4' spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.3.3 -Dspark.archive.name=spark-3.3.3-bin-hadoop3.tgz -Pzookeeper-3.6' - exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.SparkLocalClusterTest' + exclude-tags: 
'-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest' comment: 'verify-on-spark-3.3-binary' - java: 8 spark: '3.4' @@ -108,7 +108,7 @@ jobs: run: | TEST_MODULES="dev/kyuubi-codecov" ./build/mvn clean install ${MVN_OPT} -pl ${TEST_MODULES} -am \ - -Pspark-${{ matrix.spark }} ${{ matrix.spark-archive }} ${{ matrix.exclude-tags }} + -Pspark-${{ matrix.spark }} -Pspark-authz-hudi-test ${{ matrix.spark-archive }} ${{ matrix.exclude-tags }} - name: Code coverage if: | matrix.java == 8 && diff --git a/extensions/spark/kyuubi-spark-authz/pom.xml b/extensions/spark/kyuubi-spark-authz/pom.xml index 1ae63fcb3..97145e514 100644 --- a/extensions/spark/kyuubi-spark-authz/pom.xml +++ b/extensions/spark/kyuubi-spark-authz/pom.xml @@ -336,6 +336,23 @@ + + + spark-authz-hudi-test + + + org.apache.hudi + hudi-spark${hudi.spark.binary.version}-bundle_${scala.binary.version} + ${hudi.version} + test + + + + gen-policy diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json index 06d76c7e5..2febac11b 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json +++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json @@ -1409,4 +1409,101 @@ "fieldName" : "query", "fieldExtractor" : "LogicalPlanQueryExtractor" } ] +}, { + "classname" : "org.apache.spark.sql.hudi.command.AlterHoodieTableAddColumnsCommand", + "tableDescs" : [ { + "fieldName" : "tableId", + "fieldExtractor" : "TableIdentifierTableExtractor", + "columnDesc" : { + "fieldName" : "colsToAdd", + "fieldExtractor" : "StructFieldSeqColumnExtractor" + }, + "actionTypeDesc" : null, + "tableTypeDesc" : null, + "catalogDesc" : null, + "isInput" : false, + 
"setCurrentDatabaseIfMissing" : false + } ], + "opType" : "ALTERTABLE_ADDCOLS", + "queryDescs" : [ ] +}, { + "classname" : "org.apache.spark.sql.hudi.command.AlterHoodieTableChangeColumnCommand", + "tableDescs" : [ { + "fieldName" : "tableIdentifier", + "fieldExtractor" : "TableIdentifierTableExtractor", + "columnDesc" : { + "fieldName" : "columnName", + "fieldExtractor" : "StringColumnExtractor" + }, + "actionTypeDesc" : null, + "tableTypeDesc" : null, + "catalogDesc" : null, + "isInput" : false, + "setCurrentDatabaseIfMissing" : false + } ], + "opType" : "ALTERTABLE_REPLACECOLS", + "queryDescs" : [ ] +}, { + "classname" : "org.apache.spark.sql.hudi.command.AlterHoodieTableDropPartitionCommand", + "tableDescs" : [ { + "fieldName" : "tableIdentifier", + "fieldExtractor" : "TableIdentifierTableExtractor", + "columnDesc" : { + "fieldName" : "partitionSpecs", + "fieldExtractor" : "PartitionSeqColumnExtractor" + }, + "actionTypeDesc" : null, + "tableTypeDesc" : null, + "catalogDesc" : null, + "isInput" : false, + "setCurrentDatabaseIfMissing" : false + } ], + "opType" : "ALTERTABLE_DROPPARTS", + "queryDescs" : [ ] +}, { + "classname" : "org.apache.spark.sql.hudi.command.AlterHoodieTableRenameCommand", + "tableDescs" : [ { + "fieldName" : "oldName", + "fieldExtractor" : "TableIdentifierTableExtractor", + "columnDesc" : null, + "actionTypeDesc" : null, + "tableTypeDesc" : { + "fieldName" : "oldName", + "fieldExtractor" : "TableIdentifierTableTypeExtractor", + "skipTypes" : [ "TEMP_VIEW" ] + }, + "catalogDesc" : null, + "isInput" : false, + "setCurrentDatabaseIfMissing" : false + } ], + "opType" : "ALTERTABLE_RENAME", + "queryDescs" : [ ] +}, { + "classname" : "org.apache.spark.sql.hudi.command.AlterTableCommand", + "tableDescs" : [ { + "fieldName" : "table", + "fieldExtractor" : "CatalogTableTableExtractor", + "columnDesc" : null, + "actionTypeDesc" : null, + "tableTypeDesc" : null, + "catalogDesc" : null, + "isInput" : false, + "setCurrentDatabaseIfMissing" : false + } 
], + "opType" : "ALTERTABLE_PROPERTIES", + "queryDescs" : [ ] +}, { + "classname" : "org.apache.spark.sql.hudi.command.Spark31AlterTableCommand", + "tableDescs" : [ { + "fieldName" : "table", + "fieldExtractor" : "CatalogTableTableExtractor", + "columnDesc" : null, + "actionTypeDesc" : null, + "tableTypeDesc" : null, + "catalogDesc" : null, + "isInput" : false, + "setCurrentDatabaseIfMissing" : false + } ], + "opType" : "ALTERTABLE_PROPERTIES", + "queryDescs" : [ ] } ] \ No newline at end of file diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala index e95ff91ed..2477c9e45 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala @@ -86,6 +86,12 @@ private[authz] object AuthZUtils { lazy val SPARK_RUNTIME_VERSION: SemanticVersion = SemanticVersion(SPARK_VERSION) lazy val isSparkV32OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.2" lazy val isSparkV33OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.3" + lazy val isSparkV34OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.4" + lazy val isSparkV35OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.5" + + lazy val SCALA_RUNTIME_VERSION: SemanticVersion = + SemanticVersion(scala.util.Properties.versionNumberString) + lazy val isScalaV213: Boolean = SCALA_RUNTIME_VERSION >= "2.13" def quoteIfNeeded(part: String): String = { if (part.matches("[a-zA-Z0-9_]+") && !part.matches("\\d+")) { diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala index 2297f73f9..0b1df64da 100644 --- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala @@ -40,6 +40,7 @@ object RangerTestNamespace { val defaultDb = "default" val sparkCatalog = "spark_catalog" val icebergNamespace = "iceberg_ns" + val hudiNamespace = "hudi_ns" val namespace1 = "ns1" val namespace2 = "ns2" } diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala new file mode 100644 index 000000000..6e3237d2a --- /dev/null +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.kyuubi.plugin.spark.authz.gen + +import org.apache.kyuubi.plugin.spark.authz.OperationType._ +import org.apache.kyuubi.plugin.spark.authz.serde._ +import org.apache.kyuubi.plugin.spark.authz.serde.TableType._ + +object HudiCommands { + val AlterHoodieTableAddColumnsCommand = { + val cmd = "org.apache.spark.sql.hudi.command.AlterHoodieTableAddColumnsCommand" + val columnDesc = ColumnDesc("colsToAdd", classOf[StructFieldSeqColumnExtractor]) + val tableDesc = TableDesc("tableId", classOf[TableIdentifierTableExtractor], Some(columnDesc)) + TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_ADDCOLS) + } + + val AlterHoodieTableChangeColumnCommand = { + val cmd = "org.apache.spark.sql.hudi.command.AlterHoodieTableChangeColumnCommand" + val columnDesc = ColumnDesc("columnName", classOf[StringColumnExtractor]) + val tableDesc = + TableDesc("tableIdentifier", classOf[TableIdentifierTableExtractor], Some(columnDesc)) + TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_REPLACECOLS) + } + + val AlterHoodieTableDropPartitionCommand = { + val cmd = "org.apache.spark.sql.hudi.command.AlterHoodieTableDropPartitionCommand" + val columnDesc = ColumnDesc("partitionSpecs", classOf[PartitionSeqColumnExtractor]) + val tableDesc = + TableDesc("tableIdentifier", classOf[TableIdentifierTableExtractor], Some(columnDesc)) + TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_DROPPARTS) + } + + val AlterHoodieTableRenameCommand = { + val cmd = "org.apache.spark.sql.hudi.command.AlterHoodieTableRenameCommand" + val oldTableTableTypeDesc = + TableTypeDesc( + "oldName", + classOf[TableIdentifierTableTypeExtractor], + Seq(TEMP_VIEW)) + val oldTableD = TableDesc( + "oldName", + classOf[TableIdentifierTableExtractor], + tableTypeDesc = Some(oldTableTableTypeDesc)) + + TableCommandSpec(cmd, Seq(oldTableD), ALTERTABLE_RENAME) + } + + val AlterTableCommand = { + val cmd = "org.apache.spark.sql.hudi.command.AlterTableCommand" + val tableDesc = TableDesc("table", 
classOf[CatalogTableTableExtractor], None) + TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES) + } + + val Spark31AlterTableCommand = { + val cmd = "org.apache.spark.sql.hudi.command.Spark31AlterTableCommand" + val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], None) + TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES) + } + + val data: Array[TableCommandSpec] = Array( + AlterHoodieTableAddColumnsCommand, + AlterHoodieTableChangeColumnCommand, + AlterHoodieTableDropPartitionCommand, + AlterHoodieTableRenameCommand, + AlterTableCommand, + Spark31AlterTableCommand) +} diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala index 855e25e87..1b2d330d1 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala @@ -43,7 +43,7 @@ class JsonSpecFileGenerator extends AnyFunSuite { // scalastyle:on test("check spec json files") { writeCommandSpecJson("database", DatabaseCommands.data) - writeCommandSpecJson("table", TableCommands.data ++ IcebergCommands.data) + writeCommandSpecJson("table", TableCommands.data ++ IcebergCommands.data ++ HudiCommands.data) writeCommandSpecJson("function", FunctionCommands.data) writeCommandSpecJson("scan", Scans.data) } diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala new file mode 100644 index 000000000..8fcae6cf9 --- /dev/null +++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.kyuubi.plugin.spark.authz.ranger + +import org.apache.spark.SparkConf +import org.scalatest.Outcome + +import org.apache.kyuubi.Utils +import org.apache.kyuubi.plugin.spark.authz.AccessControlException +import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._ +import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._ +import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._ +import org.apache.kyuubi.tags.HudiTest +import org.apache.kyuubi.util.AssertionUtils.interceptContains + +/** + * Tests for RangerSparkExtensionSuite on Hudi SQL. + * Running this test requires enabling the `hudi` profile. + */ +@HudiTest +class HudiCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { + override protected val catalogImpl: String = "hive" + // TODO: Apache Hudi does not support Spark 3.5 or Scala 2.13 yet, + // should be changed after Apache Hudi supports Spark 3.5 and Scala 2.13. 
+ private def isSupportedVersion = !isSparkV35OrGreater && !isScalaV213 + + override protected val sqlExtensions: String = + if (isSupportedVersion) { + "org.apache.spark.sql.hudi.HoodieSparkSessionExtension" + } else { + "" + } + + override protected val extraSparkConf: SparkConf = + new SparkConf() + .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") + + val namespace1 = hudiNamespace + val table1 = "table1_hoodie" + val table2 = "table2_hoodie" + val outputTable1 = "outputTable_hoodie" + + override def withFixture(test: NoArgTest): Outcome = { + assume(isSupportedVersion) + test() + } + + override def beforeAll(): Unit = { + if (isSupportedVersion) { + if (isSparkV32OrGreater) { + spark.conf.set( + s"spark.sql.catalog.$sparkCatalog", + "org.apache.spark.sql.hudi.catalog.HoodieCatalog") + spark.conf.set(s"spark.sql.catalog.$sparkCatalog.type", "hadoop") + spark.conf.set( + s"spark.sql.catalog.$sparkCatalog.warehouse", + Utils.createTempDir("hudi-hadoop").toString) + } + super.beforeAll() + } + } + + override def afterAll(): Unit = { + if (isSupportedVersion) { + super.afterAll() + spark.sessionState.catalog.reset() + spark.sessionState.conf.clear() + } + } + + test("AlterTableCommand") { + withCleanTmpResources(Seq((s"$namespace1.$table1", "table"), (namespace1, "database"))) { + doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $namespace1")) + doAs( + admin, + sql( + s""" + |CREATE TABLE IF NOT EXISTS $namespace1.$table1(id int, name string, city string) + |USING hudi + |OPTIONS ( + | type = 'cow', + | primaryKey = 'id', + | 'hoodie.datasource.hive_sync.enable' = 'false' + |) + |PARTITIONED BY(city) + |""".stripMargin)) + + // AlterHoodieTableAddColumnsCommand + interceptContains[AccessControlException]( + doAs(someone, sql(s"ALTER TABLE $namespace1.$table1 ADD COLUMNS(age int)")))( + s"does not have [alter] privilege on [$namespace1/$table1/age]") + + // AlterHoodieTableChangeColumnCommand + interceptContains[AccessControlException]( + 
doAs(someone, sql(s"ALTER TABLE $namespace1.$table1 CHANGE COLUMN id id bigint")))( + s"does not have [alter] privilege" + + s" on [$namespace1/$table1/id]") + + // AlterHoodieTableDropPartitionCommand + interceptContains[AccessControlException]( + doAs(someone, sql(s"ALTER TABLE $namespace1.$table1 DROP PARTITION (city='test')")))( + s"does not have [alter] privilege" + + s" on [$namespace1/$table1/city]") + + // AlterHoodieTableRenameCommand + interceptContains[AccessControlException]( + doAs(someone, sql(s"ALTER TABLE $namespace1.$table1 RENAME TO $namespace1.$table2")))( + s"does not have [alter] privilege" + + s" on [$namespace1/$table1]") + + // AlterTableCommand && Spark31AlterTableCommand + sql("set hoodie.schema.on.read.enable=true") + interceptContains[AccessControlException]( + doAs(someone, sql(s"ALTER TABLE $namespace1.$table1 ADD COLUMNS(age int)")))( + s"does not have [alter] privilege on [$namespace1/$table1]") + } + } +} diff --git a/kyuubi-util-scala/src/test/java/org/apache/kyuubi/tags/HudiTest.java b/kyuubi-util-scala/src/test/java/org/apache/kyuubi/tags/HudiTest.java new file mode 100644 index 000000000..346f146fa --- /dev/null +++ b/kyuubi-util-scala/src/test/java/org/apache/kyuubi/tags/HudiTest.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.kyuubi.tags; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.scalatest.TagAnnotation; + +@TagAnnotation +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.METHOD, ElementType.TYPE}) +public @interface HudiTest {} diff --git a/pom.xml b/pom.xml index df1e0c3b7..5773eda53 100644 --- a/pom.xml +++ b/pom.xml @@ -158,6 +158,8 @@ false 4.5.14 4.4.16 + 0.14.0 + ${spark.binary.version} 1.4.0 2.15.0 4.0.4 @@ -234,7 +236,7 @@ 1.12.1 4.8.0 2.2.0 - org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest + org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest false 2.30.0 @@ -1475,6 +1477,12 @@ threeten-extra ${threeten.version} + + + org.apache.hudi + hudi-spark${hudi.spark.binary.version}-bundle_${scala.binary.version} + ${hudi.version} + @@ -2239,7 +2247,7 @@ 2.4.0 3.4.1 3.4 - org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest + org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest @@ -2250,6 +2258,8 @@ 2.4.0 + + 3.4 3.5.0 3.5 org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PySparkTest @@ -2260,7 +2270,7 @@ spark-master 4.0.0-SNAPSHOT - org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PySparkTest + org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.PySparkTest