diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HiveScan.scala b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HiveScan.scala
index 0b79d7307..ecdfc76c5 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HiveScan.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HiveScan.scala
@@ -64,7 +64,7 @@ case class HiveScan(
   }
 
   override def createReaderFactory(): PartitionReaderFactory = {
-    val hiveConf = fileIndex.hiveCatalog.hadoopConfiguration()
+    val hiveConf = new Configuration(fileIndex.hiveCatalog.hadoopConfiguration())
     addCatalogTableConfToConf(hiveConf, catalogTable)
 
     val table = HiveClientImpl.toHiveTable(catalogTable)
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveQuerySuite.scala b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveQuerySuite.scala
index 1d3d5ae10..0dd1efdec 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveQuerySuite.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveQuerySuite.scala
@@ -175,6 +175,23 @@ class HiveQuerySuite extends KyuubiHiveTest {
     }
   }
 
+  test("[KYUUBI #5414] Reader should not pollute the global hiveconf instance") {
+    withSparkSession() { spark =>
+      val table = "hive.default.hiveconf_test"
+      withTempPartitionedTable(spark, table, "ORC", hiveTable = true) {
+        spark.sql(
+          s"""
+             | INSERT OVERWRITE
+             | $table PARTITION(year = '2022')
+             | VALUES("yi", "08")
+             |""".stripMargin).collect()
+
+        checkQueryResult(s"select * from $table", spark, Array(Row.apply("yi", "2022", "08")))
+        checkQueryResult(s"select count(*) as c from $table", spark, Array(Row.apply(1)))
+      }
+    }
+  }
+
   test("Partitioned table insert and static partition value is empty string") {
     withSparkSession() { spark =>
       val table = "hive.default.employee"