[KYUUBI #6984] Fix ValueError when rendering MapType data

[
[KYUUBI #6984] Fix ValueError when rendering MapType data
](https://github.com/apache/kyuubi/issues/6984)

### Why are the changes needed?
The issue was caused by an incorrect iteration of MapType data in the `%table` magic command. When iterating over a `MapType` column, the code used `for k, v in m` directly, which raises a `ValueError` because iterating a Python dict yields only its keys; the key-value pairs must be unpacked via `m.items()`.

### How was this patch tested?
- [x] Manual testing:
  Executed a query with a `MapType` column and confirmed that the `%table` command now renders it without errors.
```python
 from pyspark.sql import SparkSession
 from pyspark.sql.types import MapType, StringType, IntegerType
 spark = SparkSession.builder \
     .appName("MapFieldExample") \
     .getOrCreate()

 data = [
     (1, {"a": "1", "b": "2"}),
     (2, {"x": "10"}),
     (3, {"key": "value"})
 ]

 schema = "id INT, map_col MAP<STRING, STRING>"
 df = spark.createDataFrame(data, schema=schema)
 df.printSchema()
 df2=df.collect()
```
Then use the `%table` magic command to render the table:
```python
 %table df2
```

Result:
```python
{'application/vnd.livy.table.v1+json': {'headers': [{'name': 'id', 'type': 'INT_TYPE'}, {'name': 'map_col', 'type': 'MAP_TYPE'}], 'data': [[1, {'a': '1', 'b': '2'}], [2, {'x': '10'}], [3, {'key': 'value'}]]}}

```

### Was this patch authored or co-authored using generative AI tooling?
No

**notice** This PR was co-authored by DeepSeek-R1.

Closes #6985 from JustFeng/patch-1.

Closes #6984

e0911ba94 [Reese Feng] Update PySparkTests for magic cmd
bc3ce1a49 [Reese Feng] Update PySparkTests for magic cmd
200d7ad9b [Reese Feng] Fix syntax error in dict iteration in magic_table_convert_map

Authored-by: Reese Feng <10377945+JustFeng@users.noreply.github.com>
Signed-off-by: Wang, Fei <fwang12@ebay.com>
This commit is contained in:
Reese Feng 2025-03-19 21:18:35 -07:00 committed by Wang, Fei
parent cb36e748ed
commit a54ee39ab3
2 changed files with 8 additions and 5 deletions

View File

@@ -314,7 +314,7 @@ def magic_table_convert_map(m):
last_value_type = None
converted_items = {}
for key, value in m:
for key, value in m.items():
key_type, key = magic_table_convert(key)
value_type, value = magic_table_convert(value)

View File

@@ -137,22 +137,25 @@ class PySparkTests extends WithKyuubiServer with HiveJDBCTestHelper {
withSessionConf()(Map(KyuubiConf.ENGINE_SPARK_PYTHON_MAGIC_ENABLED.key -> "true"))() {
withMultipleConnectionJdbcStatement()({ stmt =>
val statement = stmt.asInstanceOf[KyuubiStatement]
statement.executePython("x = [[1, 'a'], [3, 'b']]")
statement.executePython("x = [[1, 'a', {'k1':'v1'}], [3, 'b', {'k2':'v2'}]]")
val resultSet1 = statement.executePython("%json x")
assert(resultSet1.next())
val output1 = resultSet1.getString("output")
assert(output1 == "{\"application/json\":[[1,\"a\"],[3,\"b\"]]}")
assert(output1 == """{"application/json":[[1,"a",{"k1":"v1"}],[3,"b",{"k2":"v2"}]]}""")
val resultSet2 = statement.executePython("%table x")
assert(resultSet2.next())
val output2 = resultSet2.getString("output")
assert(output2 == "{\"application/vnd.livy.table.v1+json\":{" +
"\"headers\":[" +
"{\"name\":\"0\",\"type\":\"INT_TYPE\"},{\"name\":\"1\",\"type\":\"STRING_TYPE\"}" +
"{\"name\":\"0\",\"type\":\"INT_TYPE\"}," +
"{\"name\":\"1\",\"type\":\"STRING_TYPE\"}," +
"{\"name\":\"2\",\"type\":\"MAP_TYPE\"}" +
"]," +
"\"data\":[" +
"[1,\"a\"],[3,\"b\"]" +
"[1,\"a\",{\"k1\":\"v1\"}]," +
"[3,\"b\",{\"k2\":\"v2\"}]" +
"]}}")
Seq("table", "json", "matplot").foreach { magic =>