From 6be635e3a7c4fe11bc98585f74067beed3a4e6e1 Mon Sep 17 00:00:00 2001
From: Kent Yao
Date: Thu, 31 May 2018 10:17:08 +0800
Subject: [PATCH 1/2] fix #82 Start-SparkContext-xxx thread throws
 java.lang.AbstractMethodError with Spark 2.3.0

---
 src/main/scala/org/apache/spark/SparkEnv.scala | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/main/scala/org/apache/spark/SparkEnv.scala b/src/main/scala/org/apache/spark/SparkEnv.scala
index 1a3d85663..457adebad 100644
--- a/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -30,7 +30,6 @@ import org.apache.hadoop.security.UserGroupInformation
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.api.python.PythonWorkerFactory
 import org.apache.spark.broadcast.BroadcastManager
-import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config._
 import org.apache.spark.memory.{MemoryManager, StaticMemoryManager, UnifiedMemoryManager}
 import org.apache.spark.metrics.MetricsSystem
@@ -44,6 +43,7 @@ import org.apache.spark.shuffle.ShuffleManager
 import org.apache.spark.storage._
 import org.apache.spark.util.{RpcUtils, Utils}
 
+import yaooqinn.kyuubi.Logging
 import yaooqinn.kyuubi.utils.ReflectUtils
 
 /**
@@ -106,7 +106,7 @@ class SparkEnv (
         Utils.deleteRecursively(new File(path))
       } catch {
         case e: Exception =>
-          logWarning(s"Exception while deleting Spark temp dir: $path", e)
+          warn(s"Exception while deleting Spark temp dir: $path", e)
       }
     case None => // We just need to delete tmp dir created by driver, so do nothing on executor
   }
@@ -139,7 +139,7 @@ class SparkEnv (
 }
 
 object SparkEnv extends Logging {
-  logInfo("Loaded Kyuubi Supplied SparkEnv Class...")
+  info("Loaded Kyuubi Supplied SparkEnv Class...")
   private val env = new ConcurrentHashMap[String, SparkEnv]()
 
   private[spark] val driverSystemName = "sparkDriver"
@@ -149,10 +149,10 @@ object SparkEnv extends Logging {
 
   def set(e: SparkEnv) {
     if (e == null) {
-      logDebug(s"Kyuubi: Removing SparkEnv for $user")
+      debug(s"Kyuubi: Removing SparkEnv for $user")
       env.remove(user)
     } else {
-      logDebug(s"Kyuubi: Registering SparkEnv for $user")
+      debug(s"Kyuubi: Registering SparkEnv for $user")
       env.put(user, e)
     }
   }
@@ -161,7 +161,7 @@ object SparkEnv extends Logging {
    * Returns the SparkEnv.
    */
   def get: SparkEnv = {
-    logDebug(s"Kyuubi: Get SparkEnv for $user")
+    debug(s"Kyuubi: Get SparkEnv for $user")
     env.get(user)
   }
 
@@ -333,7 +333,7 @@ object SparkEnv extends Logging {
 
     val serializer = instantiateClassFromConf[Serializer](
       "spark.serializer", "org.apache.spark.serializer.JavaSerializer")
-    logDebug(s"Using serializer: ${serializer.getClass}")
+    debug(s"Using serializer: ${serializer.getClass}")
 
     val serializerManager = new SerializerManager(serializer, conf, ioEncryptionKey)
 
@@ -343,7 +343,7 @@ object SparkEnv extends Logging {
         name: String, endpointCreator: => RpcEndpoint):
       RpcEndpointRef = {
       if (isDriver) {
-        logInfo("Registering " + name)
+        info("Registering " + name)
         rpcEnv.setupEndpoint(name, endpointCreator)
       } else {
         RpcUtils.makeDriverRef(name, conf, rpcEnv)

From 47deab7bfd66bc48b9e0b4c9a2c52e8b1927309b Mon Sep 17 00:00:00 2001
From: Kent Yao
Date: Thu, 31 May 2018 14:13:34 +0800
Subject: [PATCH 2/2] doc

---
 docs/building.md       | 26 ++++++++++++++++++++++----
 docs/issue_template.md | 14 ++++++++++----
 2 files changed, 32 insertions(+), 8 deletions(-)

diff --git a/docs/building.md b/docs/building.md
index e52de98ea..429b56349 100644
--- a/docs/building.md
+++ b/docs/building.md
@@ -1,4 +1,4 @@
-# Building Kyuui
+# Building Kyuubi
 
 ## Building Kyuubi with Apache Maven
 **Kyuubi** server is built based on [Apache Maven](http://maven.apache.org),
@@ -9,18 +9,36 @@
 Running the code above in the Kyuubi project root directory is all we need to
 build a runnable Kyuubi server.
 
+In addition, you can specify a Maven profile to build Kyuubi against a particular Spark version:
+
+Spark version | Maven profile | Notes
+---|---|---
+1.x.x | (none) | not supported
+2.0.x | (none) | not supported
+2.1.2 | `-Pspark-2.1` | `spark-2.1` is the default profile for building Kyuubi; in practice it supports all 2.1.x releases and above
+2.2.1 | `-Pspark-2.2` | If you use Spark 2.2.x and hit an incompatibility, specify `-Pspark-2.2` to build Kyuubi yourself
+2.3.0 | `-Pspark-2.3` | If you use Spark 2.3.x and hit an incompatibility, specify `-Pspark-2.3` to build Kyuubi yourself
+
 ## Building a Runnable Distribution
 
 To create a Kyuubi distribution like those distributed by
 [Kyuubi Release Page](https://github.com/yaooqinn/kyuubi/releases),
 and that is laid out so as to be runnable, use `./build/dist` in the project root directory.
 
-Example:
+Example 1:
 ```bash
 ./build/dist --name custom-name --tgz
 ```
-This will build a Kyuubi distribution name `kyuubi-{version}-bin-custom-name.tar.gz`. For more information on usage,
-run `./build/dist --help`
+This will build a Kyuubi distribution named `kyuubi-{version}-bin-custom-name.tar.gz`.
+
+Example 2:
+```bash
+./build/dist --tgz -Pspark-2.3
+```
+This will build a Kyuubi distribution named `kyuubi-{version}-bin-spark-2.3.0.tar.gz`.
+
+
+For more information on usage, run `./build/dist --help`.
 
 ## Running Tests
 The following is an example of a command to run the tests:
diff --git a/docs/issue_template.md b/docs/issue_template.md
index 324e08536..53c0a679c 100644
--- a/docs/issue_template.md
+++ b/docs/issue_template.md
@@ -1,10 +1,16 @@
-## Expected behavior
+### Expected behavior
 
-## Actual behavior.
+### Actual behavior
 
-## Steps to reproduce the problem.
+### Steps to reproduce
 
-## Specifications like the version of the project, operating system, or hardware.
+### Specifications
+
+ - version of the project
+
+ - operating system
+
+ - hardware
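
A note on why patch 1 resolves the error: Kyuubi ships its own copy of `SparkEnv`, which previously mixed in Spark's internal `org.apache.spark.internal.Logging` trait. That trait changed shape across Spark releases, so a class compiled against the 2.1-era trait can fail with `java.lang.AbstractMethodError` when run on Spark 2.3.0. Switching to a `Logging` trait owned by Kyuubi itself (`yaooqinn.kyuubi.Logging`, with the shorter `info`/`debug`/`warn` method names seen in the diff) removes the dependency on Spark internals. Below is a minimal sketch of what such a trait can look like, assuming an SLF4J backend; the actual `yaooqinn.kyuubi.Logging` implementation may differ:

```scala
package yaooqinn.kyuubi

import org.slf4j.{Logger, LoggerFactory}

// A project-owned logging trait. Because it is compiled and shipped with
// Kyuubi itself, its method signatures cannot drift when the Spark version
// changes, which is what broke mixers of Spark's internal Logging trait
// on Spark 2.3.0.
trait Logging {
  // Strip the trailing "$" so Scala objects log under their plain class name.
  private lazy val logger: Logger =
    LoggerFactory.getLogger(getClass.getName.stripSuffix("$"))

  def info(message: String): Unit = logger.info(message)
  def debug(message: String): Unit = logger.debug(message)
  def warn(message: String): Unit = logger.warn(message)
  def warn(message: String, t: Throwable): Unit = logger.warn(message, t)
  def error(message: String, t: Throwable): Unit = logger.error(message, t)
}
```

With a trait like this in scope, call sites read exactly as in the diff, e.g. `object SparkEnv extends Logging { info("Loaded Kyuubi Supplied SparkEnv Class...") }`.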