[KYUUBI #5191] Make Spark extension plugin compilable on Scala 2.13

### _Why are the changes needed?_

- to make Spark extension plugin compilable on Scala 2.13 with Spark 3.2/3.3/3.4 (Spark 3.1 does not support Scala 2.13)
```
mvn clean install -DskipTests -Pflink-provided,hive-provided,spark-provided -Pscala-2.13 -pl :kyuubi-extension-spark-3-2_2.13 -Pspark-3.2 -am

mvn clean install -DskipTests -Pflink-provided,hive-provided,spark-provided -Pscala-2.13 -pl :kyuubi-extension-spark-3-3_2.13 -Pspark-3.3 -am

mvn clean install -DskipTests -Pflink-provided,hive-provided,spark-provided -Pscala-2.13 -pl :kyuubi-extension-spark-3-4_2.13 -Pspark-3.4 -am
```

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request

### _Was this patch authored or co-authored using generative AI tooling?_

Closes #5191 from bowenliang123/scala213-sparkext.

Closes #5191

156ae26c8 [liangbowen] adapt spark ext plugin to Scala 2.13

Authored-by: liangbowen <liangbowen@gf.com.cn>
Signed-off-by: liangbowen <liangbowen@gf.com.cn>
This commit is contained in:
liangbowen 2023-08-23 08:43:11 +08:00
parent 5de3a40690
commit ac624b9ff9
5 changed files with 11 additions and 9 deletions

View File

@@ -25,7 +25,7 @@
<relativePath>../../../pom.xml</relativePath>
</parent>
<artifactId>kyuubi-extension-spark-3-4_2.12</artifactId>
<artifactId>kyuubi-extension-spark-3-4_${scala.binary.version}</artifactId>
<packaging>jar</packaging>
<name>Kyuubi Dev Spark Extensions (for Spark 3.4)</name>
<url>https://kyuubi.apache.org/</url>

View File

@@ -116,6 +116,7 @@ class KyuubiSparkSQLAstBuilder extends KyuubiSparkSQLBaseVisitor[AnyRef] with SQ
val zorderCols = ctx.zorderClause().order.asScala
.map(visitMultipartIdentifier)
.map(UnresolvedAttribute(_))
.toSeq
val orderExpr =
if (zorderCols.length == 1) {
@@ -130,16 +131,16 @@ class KyuubiSparkSQLAstBuilder extends KyuubiSparkSQLBaseVisitor[AnyRef] with SQ
override def visitMultipartIdentifier(ctx: MultipartIdentifierContext): Seq[String] =
withOrigin(ctx) {
ctx.parts.asScala.map(_.getText)
ctx.parts.asScala.map(_.getText).toSeq
}
override def visitZorderClause(ctx: ZorderClauseContext): Seq[UnresolvedAttribute] =
withOrigin(ctx) {
val res = ListBuffer[UnresolvedAttribute]()
ctx.multipartIdentifier().forEach { identifier =>
res += UnresolvedAttribute(identifier.parts.asScala.map(_.getText))
res += UnresolvedAttribute(identifier.parts.asScala.map(_.getText).toSeq)
}
res
res.toSeq
}
private def typedVisit[T](ctx: ParseTree): T = {

View File

@@ -376,7 +376,7 @@ trait WatchDogSuiteBase extends KyuubiSparkSQLExtensionTest {
|ORDER BY a
|DESC
|""".stripMargin)
.collect().head.get(0).equals(10))
.collect().head.get(0) === 10)
}
}
}

View File

@@ -116,6 +116,7 @@ class KyuubiSparkSQLAstBuilder extends KyuubiSparkSQLBaseVisitor[AnyRef] with SQ
val zorderCols = ctx.zorderClause().order.asScala
.map(visitMultipartIdentifier)
.map(UnresolvedAttribute(_))
.toSeq
val orderExpr =
if (zorderCols.length == 1) {
@@ -130,16 +131,16 @@ class KyuubiSparkSQLAstBuilder extends KyuubiSparkSQLBaseVisitor[AnyRef] with SQ
override def visitMultipartIdentifier(ctx: MultipartIdentifierContext): Seq[String] =
withOrigin(ctx) {
ctx.parts.asScala.map(_.getText)
ctx.parts.asScala.map(_.getText).toSeq
}
override def visitZorderClause(ctx: ZorderClauseContext): Seq[UnresolvedAttribute] =
withOrigin(ctx) {
val res = ListBuffer[UnresolvedAttribute]()
ctx.multipartIdentifier().forEach { identifier =>
res += UnresolvedAttribute(identifier.parts.asScala.map(_.getText))
res += UnresolvedAttribute(identifier.parts.asScala.map(_.getText).toSeq)
}
res
res.toSeq
}
private def typedVisit[T](ctx: ParseTree): T = {

View File

@@ -376,7 +376,7 @@ trait WatchDogSuiteBase extends KyuubiSparkSQLExtensionTest {
|ORDER BY a
|DESC
|""".stripMargin)
.collect().head.get(0).equals(10))
.collect().head.get(0) === 10)
}
}
}