[KYUUBI #3671] [TEST] assert error message for SCHEMA_NOT_FOUND and TABLE_OR_VIEW_NOT_FOUND for Spark 3.4

### _Why are the changes needed?_

to close #3671.

- fix assert message for '[SCHEMA_NOT_FOUND]' and '[TABLE_OR_VIEW_NOT_FOUND]'
- introduce SparkVersionUtil in kyuubi-common test for checking Spark version

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request

Closes #3676 from bowenliang123/3671-notfound.

Closes #3671

c505098f [Bowen Liang] update
a950abbf [Bowen Liang] fix typo
56464e84 [Bowen Liang] update SparkVersionUtil
2190a93e [Bowen Liang] fix assert message for '[SCHEMA_NOT_FOUND]' and '[TABLE_OR_VIEW_NOT_FOUND]', introduce SparkVersionUtil for checking spark version in kyuubi-common test

Authored-by: Bowen Liang <liangbowen@gf.com.cn>
Signed-off-by: ulysses-you <ulyssesyou@apache.org>
This commit is contained in:
Bowen Liang 2022-10-21 12:15:55 +08:00 committed by ulysses-you
parent 79512a1835
commit e00e380286
No known key found for this signature in database
GPG Key ID: 4C500BC62D576766
4 changed files with 55 additions and 12 deletions

View File

@ -38,6 +38,7 @@ import org.apache.kyuubi.engine.spark.shim.SparkCatalogShim
import org.apache.kyuubi.operation.{HiveMetadataTests, SparkQueryTests}
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
import org.apache.kyuubi.util.KyuubiHadoopUtils
import org.apache.kyuubi.util.SparkVersionUtil.isSparkVersionAtLeast
class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with SparkQueryTests {
@ -457,12 +458,19 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
val req = new TOpenSessionReq()
req.setUsername("kentyao")
req.setPassword("anonymous")
val conf = Map("use:database" -> "default2")
val dbName = "default2"
val conf = Map("use:database" -> dbName)
req.setConfiguration(conf.asJava)
val tOpenSessionResp = client.OpenSession(req)
val status = tOpenSessionResp.getStatus
val errorMessage = status.getErrorMessage
assert(status.getStatusCode === TStatusCode.ERROR_STATUS)
assert(status.getErrorMessage.contains("Database 'default2' not found"))
if (isSparkVersionAtLeast("3.4")) {
assert(errorMessage.contains("[SCHEMA_NOT_FOUND]"))
assert(errorMessage.contains(s"The schema `$dbName` cannot be found."))
} else {
assert(errorMessage.contains(s"Database '$dbName' not found"))
}
}
}

View File

@ -17,16 +17,12 @@
package org.apache.kyuubi.operation
import org.apache.kyuubi.{IcebergSuiteMixin, SPARK_COMPILE_VERSION}
import org.apache.kyuubi.engine.SemanticVersion
import org.apache.kyuubi.IcebergSuiteMixin
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
import org.apache.kyuubi.util.SparkVersionUtil.isSparkVersionAtLeast
trait IcebergMetadataTests extends HiveJDBCTestHelper with IcebergSuiteMixin {
def isSparkVersionAtLeast(ver: String): Boolean = {
SemanticVersion(SPARK_COMPILE_VERSION).isVersionAtLeast(ver)
}
test("get catalogs") {
withJdbcStatement() { statement =>
val metaData = statement.getConnection.getMetaData

View File

@ -29,6 +29,7 @@ import org.apache.hive.service.rpc.thrift.{TExecuteStatementReq, TFetchResultsRe
import org.apache.kyuubi.{KYUUBI_VERSION, Utils}
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.engine.SemanticVersion
import org.apache.kyuubi.util.SparkVersionUtil.isSparkVersionAtLeast
trait SparkQueryTests extends HiveJDBCTestHelper {
@ -469,11 +470,20 @@ trait SparkQueryTests extends HiveJDBCTestHelper {
}
test("KYUUBI #1059: Plan only operations") {
val ddl = "create table t(a int) using parquet"
val dql = "select * from t"
val tableName = "t"
val ddl = s"create table $tableName(a int) using parquet"
val dql = s"select * from $tableName"
val setkey = "SET kyuubi.operation.plan.only.mode"
withJdbcStatement("t") { statement =>
try {
val assertTableOrViewNotfound: (Exception, String) => Unit = (e, tableName) => {
if (isSparkVersionAtLeast("3.4")) {
assert(e.getMessage.contains("[TABLE_OR_VIEW_NOT_FOUND]"))
assert(e.getMessage.contains(s"The table or view `$tableName` cannot be found."))
} else {
assert(e.getMessage.contains("Table or view not found"))
}
}
statement.execute("SET kyuubi.operation.plan.only.mode=optimize")
val set = statement.executeQuery(ddl)
assert(set.next())
@ -482,10 +492,10 @@ trait SparkQueryTests extends HiveJDBCTestHelper {
assert(set0.next())
assert(set0.getString(2) === "optimize")
val e1 = intercept[SQLException](statement.executeQuery(dql))
assert(e1.getMessage.contains("Table or view not found"))
assertTableOrViewNotfound(e1, tableName)
statement.execute("SET kyuubi.operation.plan.only.mode=analyze")
val e2 = intercept[SQLException](statement.executeQuery(dql))
assert(e2.getMessage.contains("Table or view not found"))
assertTableOrViewNotfound(e2, tableName)
statement.execute("SET kyuubi.operation.plan.only.mode=parse")
val set1 = statement.executeQuery(dql)
assert(set1.next())

View File

@ -0,0 +1,29 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kyuubi.util
import org.apache.kyuubi.SPARK_COMPILE_VERSION
import org.apache.kyuubi.engine.SemanticVersion
/**
 * Test helper for comparing the Spark version Kyuubi was compiled against.
 *
 * Lives in kyuubi-common tests so suites that are not on the engine classpath
 * (e.g. [[org.apache.kyuubi.operation]] JDBC suites) can branch assertions on
 * the Spark version without duplicating version-parsing logic.
 */
object SparkVersionUtil {

  // Parsed once on first access; SPARK_COMPILE_VERSION is fixed per build.
  lazy val sparkSemanticVersion: SemanticVersion = SemanticVersion(SPARK_COMPILE_VERSION)

  /**
   * @param ver a version string such as "3.4"
   * @return true when the compile-time Spark version is `ver` or newer
   */
  def isSparkVersionAtLeast(ver: String): Boolean = sparkSemanticVersion.isVersionAtLeast(ver)
}