[KYUUBI #722] [TEST] Use in-memory catalog in most test suites

<!--
Thanks for sending a pull request!

Here are some tips for you:
  1. If this is your first time, please read our contributor guidelines: https://kyuubi.readthedocs.io/en/latest/community/contributions.html
  2. If the PR is related to an issue in https://github.com/NetEase/kyuubi/issues, add '[KYUUBI #XXXX]' in your PR title, e.g., '[KYUUBI #XXXX] Your PR title ...'.
  3. If the PR is unfinished, add '[WIP]' in your PR title, e.g., '[WIP][KYUUBI #XXXX] Your PR title ...'.
-->

### _Why are the changes needed?_
<!--
Please clarify why the changes are needed. For instance,
  1. If you add a feature, you can talk about the use case of it.
  2. If you fix a bug, you can clarify why it is a bug.
-->
Follow-up to #707: use the in-memory catalog in most test suites to avoid Derby locks and speed up the test suites.

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.readthedocs.io/en/latest/tools/testing.html#running-tests) locally before making a pull request

Closes #722 from pan3793/catalog.

Closes #722

fcc38af6 [Cheng Pan] fix ut
0c131629 [Cheng Pan] trait HiveJDBCTests extends BasicJDBCTests
19bb4237 [Cheng Pan] [TEST] Use in-memory catalog in most test suites

Authored-by: Cheng Pan <379377944@qq.com>
Signed-off-by: Kent Yao <yao@apache.org>
This commit is contained in:
Cheng Pan 2021-06-29 21:51:00 +08:00 committed by Kent Yao
parent 164bd03799
commit 0eb9bdc376
No known key found for this signature in database
GPG Key ID: F7051850A0AF904D
6 changed files with 164 additions and 28 deletions

View File

@ -33,10 +33,10 @@ import org.apache.spark.sql.types._
import org.apache.kyuubi.Utils
import org.apache.kyuubi.engine.spark.WithSparkSQLEngine
import org.apache.kyuubi.engine.spark.shim.SparkCatalogShim
import org.apache.kyuubi.operation.JDBCTests
import org.apache.kyuubi.operation.HiveJDBCTests
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
class SparkOperationSuite extends WithSparkSQLEngine with JDBCTests {
class SparkOperationSuite extends WithSparkSQLEngine with HiveJDBCTests {
override protected def jdbcUrl: String = getJdbcUrl
override def withKyuubiConf: Map[String, String] = Map.empty
@ -81,7 +81,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with JDBCTests {
|CREATE TABLE IF NOT EXISTS $dftSchema.$tableName (
| ${schema.toDDL}
|)
|using parquet""".stripMargin
|USING parquet""".stripMargin
withJdbcStatement(tableName) { statement =>
statement.execute(ddl)
@ -148,7 +148,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with JDBCTests {
test("get columns operation should handle interval column properly") {
val viewName = "view_interval"
val ddl = s"CREATE GLOBAL TEMP VIEW $viewName as select interval 1 day as i"
val ddl = s"CREATE GLOBAL TEMP VIEW $viewName AS SELECT INTERVAL 1 DAY AS i"
withJdbcStatement(viewName) { statement =>
statement.execute(ddl)
@ -175,7 +175,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with JDBCTests {
test("handling null in view for get columns operations") {
val viewName = "view_null"
val ddl = s"CREATE GLOBAL TEMP VIEW $viewName as select null as n"
val ddl = s"CREATE GLOBAL TEMP VIEW $viewName AS SELECT NULL AS n"
withJdbcStatement(viewName) { statement =>
statement.execute(ddl)

View File

@ -17,7 +17,6 @@
package org.apache.kyuubi.operation
import org.apache.kyuubi.Utils
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
trait BasicJDBCTests extends JDBCTestUtils {
@ -59,18 +58,14 @@ trait BasicJDBCTests extends JDBCTestUtils {
test("get tables") {
val table_test = "table_1_test"
val table_external_test = "table_2_test"
val view_test = "view_1_test"
val view_global_test = "view_2_test"
val tables = Seq(table_test, table_external_test, view_test, view_global_test)
val schemas = Seq("default", "default", "default", "global_temp")
val tableTypes = Seq("TABLE", "TABLE", "VIEW", "VIEW")
val tables = Seq(table_test, view_test, view_global_test)
val schemas = Seq("default", "default", "global_temp")
val tableTypes = Seq("TABLE", "VIEW", "VIEW")
withJdbcStatement(view_test, view_global_test, table_test, view_test) { statement =>
statement.execute(
s"CREATE TABLE IF NOT EXISTS $table_test(key int) USING parquet COMMENT '$table_test'")
val loc = Utils.createTempDir()
statement.execute(s"CREATE EXTERNAL TABLE IF NOT EXISTS $table_external_test(key int)" +
s" COMMENT '$table_external_test' LOCATION '$loc'")
statement.execute(s"CREATE VIEW IF NOT EXISTS $view_test COMMENT '$view_test'" +
s" AS SELECT * FROM $table_test")
statement.execute(s"CREATE GLOBAL TEMP VIEW $view_global_test" +
@ -88,23 +83,23 @@ trait BasicJDBCTests extends JDBCTestUtils {
assert(rs1.getString(REMARKS) === tables(i).replace(view_global_test, ""))
i += 1
}
assert(i === 4)
assert(i === 3)
val rs2 = metaData.getTables(null, null, null, Array("VIEW"))
i = 2
i = 1
while(rs2.next()) {
assert(rs2.getString(TABLE_NAME) == tables(i))
i += 1
}
assert(i === 4)
assert(i === 3)
val rs3 = metaData.getTables(null, "*", "*", Array("VIEW"))
i = 2
i = 1
while(rs3.next()) {
assert(rs3.getString(TABLE_NAME) == tables(i))
i += 1
}
assert(i === 4)
assert(i === 3)
val rs4 = metaData.getTables(null, null, "table%", Array("VIEW"))
assert(!rs4.next())
@ -118,13 +113,17 @@ trait BasicJDBCTests extends JDBCTestUtils {
assert(rs6.getString(TABLE_NAME) == tables(i))
i += 1
}
assert(i === 2)
assert(i === 1)
val rs7 = metaData.getTables(null, "default", "%", Array("VIEW"))
i = 2
i = 1
while(rs7.next()) {
assert(rs7.getString(TABLE_NAME) == view_test)
}
statement.execute(s"DROP TABLE IF EXISTS ${schemas(0)}.$table_test")
statement.execute(s"DROP VIEW IF EXISTS ${schemas(1)}.$view_test")
statement.execute(s"DROP VIEW IF EXISTS ${schemas(2)}.$view_global_test")
}
}

View File

@ -0,0 +1,100 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kyuubi.operation
import org.apache.kyuubi.Utils
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
/**
 * JDBC metadata tests that require a persistent (Hive-backed) catalog — e.g.
 * EXTERNAL tables — which the in-memory catalog does not support. Suites that
 * run against a Hive catalog mix this in instead of plain [[BasicJDBCTests]].
 */
trait HiveJDBCTests extends BasicJDBCTests {

  test("get tables - hive catalog") {
    val table_test = "table_1_test"
    val table_external_test = "table_2_test"
    val view_test = "view_1_test"
    val view_global_test = "view_2_test"
    // Expected rows from DatabaseMetaData.getTables, in listing order:
    // managed table, external table, local view, then the global temp view.
    val tables = Seq(table_test, table_external_test, view_test, view_global_test)
    val schemas = Seq("default", "default", "default", "global_temp")
    val tableTypes = Seq("TABLE", "TABLE", "VIEW", "VIEW")
    // BUGFIX: the cleanup list previously repeated view_test and omitted
    // table_external_test, so the helper's safety-net cleanup would leave the
    // external table behind if an assertion failed before the explicit DROPs
    // at the end of this test.
    withJdbcStatement(view_test, view_global_test, table_test, table_external_test) { statement =>
      statement.execute(
        s"CREATE TABLE IF NOT EXISTS $table_test(key int) USING parquet COMMENT '$table_test'")
      val loc = Utils.createTempDir()
      statement.execute(s"CREATE EXTERNAL TABLE IF NOT EXISTS $table_external_test(key int)" +
        s" COMMENT '$table_external_test' LOCATION '$loc'")
      statement.execute(s"CREATE VIEW IF NOT EXISTS $view_test COMMENT '$view_test'" +
        s" AS SELECT * FROM $table_test")
      statement.execute(s"CREATE GLOBAL TEMP VIEW $view_global_test" +
        s" COMMENT '$view_global_test' AS SELECT * FROM $table_test")

      val metaData = statement.getConnection.getMetaData

      // No filters: all four objects come back, each matching its expected
      // schema/name/type. Global temp views carry no remarks, hence the
      // replace(view_global_test, "") on the expected comment.
      val rs1 = metaData.getTables(null, null, null, null)
      var i = 0
      while (rs1.next()) {
        val catalogName = rs1.getString(TABLE_CAT)
        assert(catalogName === "spark_catalog" || catalogName === null)
        assert(rs1.getString(TABLE_SCHEM) === schemas(i))
        assert(rs1.getString(TABLE_NAME) == tables(i))
        assert(rs1.getString(TABLE_TYPE) == tableTypes(i))
        assert(rs1.getString(REMARKS) === tables(i).replace(view_global_test, ""))
        i += 1
      }
      assert(i === 4)

      // VIEW-only filter: skips the two tables, so iteration starts at index 2.
      val rs2 = metaData.getTables(null, null, null, Array("VIEW"))
      i = 2
      while (rs2.next()) {
        assert(rs2.getString(TABLE_NAME) == tables(i))
        i += 1
      }
      assert(i === 4)

      // "*" schema/table patterns behave as wildcards for views too.
      val rs3 = metaData.getTables(null, "*", "*", Array("VIEW"))
      i = 2
      while (rs3.next()) {
        assert(rs3.getString(TABLE_NAME) == tables(i))
        i += 1
      }
      assert(i === 4)

      // A "table%" name pattern matches no views.
      val rs4 = metaData.getTables(null, null, "table%", Array("VIEW"))
      assert(!rs4.next())

      val rs5 = metaData.getTables(null, "*", "table%", Array("VIEW"))
      assert(!rs5.next())

      // "table%" with TABLE type matches exactly the two tables.
      val rs6 = metaData.getTables(null, null, "table%", Array("TABLE"))
      i = 0
      while (rs6.next()) {
        assert(rs6.getString(TABLE_NAME) == tables(i))
        i += 1
      }
      assert(i === 2)

      // Views in "default" only: the global temp view lives in global_temp,
      // so only view_test is returned.
      val rs7 = metaData.getTables(null, "default", "%", Array("VIEW"))
      i = 2
      while (rs7.next()) {
        assert(rs7.getString(TABLE_NAME) == view_test)
      }

      statement.execute(s"DROP TABLE IF EXISTS ${schemas(0)}.$table_test")
      statement.execute(s"DROP TABLE IF EXISTS ${schemas(1)}.$table_external_test")
      statement.execute(s"DROP VIEW IF EXISTS ${schemas(2)}.$view_test")
      statement.execute(s"DROP VIEW IF EXISTS ${schemas(3)}.$view_global_test")
    }
  }
}

View File

@ -17,8 +17,6 @@
package org.apache.kyuubi
import java.nio.file.Files
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf._
import org.apache.kyuubi.ha.HighAvailabilityConf.{HA_ZK_ACL_ENABLED, HA_ZK_QUORUM}
@ -31,10 +29,8 @@ trait WithKyuubiServer extends KyuubiFunSuite {
private var zkServer: EmbeddedZookeeper = _
private var server: KyuubiServer = _
private val metastore = Utils.createTempDir()
override def beforeAll(): Unit = {
Files.delete(metastore)
zkServer = new EmbeddedZookeeper()
conf.set(ZookeeperConf.ZK_CLIENT_PORT, -1)
val zkData = Utils.createTempDir()
@ -43,8 +39,7 @@ trait WithKyuubiServer extends KyuubiFunSuite {
zkServer.start()
conf.set("spark.ui.enabled", "false")
conf.set("spark.hadoop.javax.jdo.option.ConnectionURL",
s"jdbc:derby:;databaseName=$metastore;create=true")
conf.setIfMissing("spark.sql.catalogImplementation", "in-memory")
conf.set(FRONTEND_BIND_PORT, 0)
conf.setIfMissing(ENGINE_CHECK_INTERVAL, 3000L)
conf.setIfMissing(ENGINE_IDLE_TIMEOUT, 10000L)

View File

@ -26,8 +26,8 @@ class InitializeSQLSuite extends WithKyuubiServer with JDBCTestUtils {
override protected val conf: KyuubiConf = {
KyuubiConf().set(ENGINE_INITIALIZE_SQL.key,
"CREATE DATABASE IF NOT EXISTS INIT_DB;" +
"CREATE TABLE IF NOT EXISTS INIT_DB.test(a int);" +
"INSERT OVERWRITE TABLE INIT_DB.test SELECT 1;")
"CREATE TABLE IF NOT EXISTS INIT_DB.test(a int) USING CSV;" +
"INSERT OVERWRITE TABLE INIT_DB.test VALUES (1);")
}
override def afterAll(): Unit = {

View File

@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kyuubi.operation
import java.nio.file.Files
import org.apache.kyuubi.{Utils, WithKyuubiServer}
import org.apache.kyuubi.config.KyuubiConf
/**
 * Runs the Hive-catalog JDBC tests against a server-shared engine backed by an
 * embedded Derby metastore.
 */
class KyuubiOperationHiveSuite extends WithKyuubiServer with HiveJDBCTests {

  // Reserve a unique path, then remove it — presumably so Derby can create
  // the metastore directory itself (`create=true` below); TODO confirm.
  private val metastore = {
    val path = Utils.createTempDir()
    Files.deleteIfExists(path)
    path
  }

  override protected val conf: KyuubiConf = {
    KyuubiConf()
      .set(KyuubiConf.ENGINE_SHARE_LEVEL, "server")
      .set("spark.sql.catalogImplementation", "hive")
      .set("spark.hadoop.javax.jdo.option.ConnectionURL",
        s"jdbc:derby:;databaseName=$metastore;create=true")
  }

  override protected def jdbcUrl: String = getJdbcUrl
}