From 35669b375a78d855c5bd33577e9928be1606fb5c Mon Sep 17 00:00:00 2001
From: liangbowen
Date: Thu, 24 Nov 2022 10:06:07 +0800
Subject: [PATCH] [KYUUBI #3805] [FOLLOWUP] transform `PySparkTests` from trait
 to class to enable @PySparkTest

### _Why are the changes needed?_

Annotation PySparkTest, introduced in the original fix for #3805, is not
working on trait `PySparkTests`. In order to close #3805, transform
`PySparkTests` from trait to class, to enable PySparkTest for skipping tests
in nightly builds correctly.

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [ ] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request

Closes #3846 from bowenliang123/3805-followup.

Closes #3805

6aacbea84 [liangbowen] make the type of `PySparkTests` from trait to class, in order to enable annotation @PySparkTest for skipping tests

Authored-by: liangbowen
Signed-off-by: Fu Chen
---
 .../apache/kyuubi/engine/spark/operation/PySparkTests.scala    | 5 ++++-
 .../kyuubi/engine/spark/operation/SparkOperationSuite.scala    | 3 +--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/PySparkTests.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/PySparkTests.scala
index 1e8920755..115809c6a 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/PySparkTests.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/PySparkTests.scala
@@ -27,7 +27,10 @@ import org.apache.kyuubi.operation.HiveJDBCTestHelper
 import org.apache.kyuubi.tags.PySparkTest
 
 @PySparkTest
-trait PySparkTests extends WithSparkSQLEngine with HiveJDBCTestHelper {
+class PySparkTests extends WithSparkSQLEngine with HiveJDBCTestHelper {
+
+  override protected def jdbcUrl: String = getJdbcUrl
+  override def withKyuubiConf: Map[String, String] = Map.empty
 
   test("pyspark support") {
     val code = "print(1)"
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
index b58d39e73..8d3e1d7ac 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
@@ -40,8 +40,7 @@ import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
 import org.apache.kyuubi.util.KyuubiHadoopUtils
 import org.apache.kyuubi.util.SparkVersionUtil.isSparkVersionAtLeast
 
-class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with SparkQueryTests
-    with PySparkTests {
+class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with SparkQueryTests {
 
   override protected def jdbcUrl: String = getJdbcUrl
   override def withKyuubiConf: Map[String, String] = Map.empty