Merge pull request #83 from yaooqinn/i82
fix #82 Start-SparkContext-xxx thread throws java.lang.AbstractMethodError with spark 2.3.0
commit 5478d99bcc
@@ -1,4 +1,4 @@
-# Building Kyuui
+# Building Kyuubi
 
 ## Building Kyuubi with Apache Maven
 
 **Kyuubi** server is built based on [Apache Maven](http://maven.apache.org),
@@ -9,18 +9,36 @@
 Running the code above in the Kyuubi project root directory is all we need to build a runnable Kyuubi server.
 
+Besides, you can specify a particular Maven profile of Spark to build Kyuubi towards different Spark versions.
+
+spark version| maven profile | notes
+---|---|---
+1.x.x|(none)| not supported
+2.0.x|(none)| not supported
+2.1.2|`-Pspark-2.1` | spark-2.1 is the default profile for building Kyuubi, and it de facto supports all 2.1.x and above
+2.2.1|`-Pspark-2.2` | if you use Spark 2.2.x and hit an incompatibility, specify `-Pspark-2.2` to build Kyuubi yourself
+2.3.0|`-Pspark-2.3` | if you use Spark 2.3.x and hit an incompatibility, specify `-Pspark-2.3` to build Kyuubi yourself
+
 ## Building a Runnable Distribution
 
 To create a Kyuubi distribution like those distributed by [Kyuubi Release Page](https://github.com/yaooqinn/kyuubi/releases),
 and that is laid out so as to be runnable, use `./build/dist` in the project root directory.
 
-Example:
+Example 1:
 
 ```bash
 ./build/dist --name custom-name --tgz
 ```
 
-This will build a Kyuubi distribution name `kyuubi-{version}-bin-custom-name.tar.gz`. For more information on usage,
-run `./build/dist --help`
+This will build a Kyuubi distribution named `kyuubi-{version}-bin-custom-name.tar.gz` for you.
+
+Example 2:
+
+```bash
+./build/dist --tgz -Pspark-2.3
+```
+
+This will build a Kyuubi distribution named `kyuubi-{version}-bin-spark-2.3.0.tar.gz` for you.
+
+For more information on usage, run `./build/dist --help`
 
 ## Running Tests
 The following is an example of a command to run the tests:
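The profile table above never shows a complete build invocation. As a minimal sketch, assuming the repository ships a Spark-style `build/mvn` wrapper next to `build/dist` (a locally installed `mvn` works the same way):

```bash
# Package the Kyuubi server against the spark-2.3 Maven profile, skipping tests.
# `build/mvn` is an assumed Spark-style wrapper; substitute plain `mvn` if absent.
./build/mvn clean package -Pspark-2.3 -DskipTests
```

Under the same assumption, the tests for a given profile can be run with `./build/mvn test -Pspark-2.3`.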
@@ -1,10 +1,16 @@
-## Expected behavior
+### expected behavior
 
-## Actual behavior.
+### actual behavior
 
-## Steps to reproduce the problem.
+### steps to reproduce
 
-## Specifications like the version of the project, operating system, or hardware.
+### specifications
+
+- version of the project
+
+- operating system
+
+- hardware
@@ -30,7 +30,6 @@ import org.apache.hadoop.security.UserGroupInformation
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.api.python.PythonWorkerFactory
 import org.apache.spark.broadcast.BroadcastManager
-import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config._
 import org.apache.spark.memory.{MemoryManager, StaticMemoryManager, UnifiedMemoryManager}
 import org.apache.spark.metrics.MetricsSystem
@@ -44,6 +43,7 @@ import org.apache.spark.shuffle.ShuffleManager
 import org.apache.spark.storage._
 import org.apache.spark.util.{RpcUtils, Utils}
 
+import yaooqinn.kyuubi.Logging
 import yaooqinn.kyuubi.utils.ReflectUtils
 
 /**
@@ -106,7 +106,7 @@ class SparkEnv (
         Utils.deleteRecursively(new File(path))
       } catch {
         case e: Exception =>
-          logWarning(s"Exception while deleting Spark temp dir: $path", e)
+          warn(s"Exception while deleting Spark temp dir: $path", e)
       }
     case None => // We just need to delete tmp dir created by driver, so do nothing on executor
   }
@@ -139,7 +139,7 @@ class SparkEnv (
 }
 
 object SparkEnv extends Logging {
-  logInfo("Loaded Kyuubi Supplied SparkEnv Class...")
+  info("Loaded Kyuubi Supplied SparkEnv Class...")
   private val env = new ConcurrentHashMap[String, SparkEnv]()
 
   private[spark] val driverSystemName = "sparkDriver"
@@ -149,10 +149,10 @@ object SparkEnv extends Logging {
 
   def set(e: SparkEnv) {
     if (e == null) {
-      logDebug(s"Kyuubi: Removing SparkEnv for $user")
+      debug(s"Kyuubi: Removing SparkEnv for $user")
      env.remove(user)
     } else {
-      logDebug(s"Kyuubi: Registering SparkEnv for $user")
+      debug(s"Kyuubi: Registering SparkEnv for $user")
       env.put(user, e)
     }
   }
@@ -161,7 +161,7 @@ object SparkEnv extends Logging {
    * Returns the SparkEnv.
    */
   def get: SparkEnv = {
-    logDebug(s"Kyuubi: Get SparkEnv for $user")
+    debug(s"Kyuubi: Get SparkEnv for $user")
     env.get(user)
   }
 
@@ -333,7 +333,7 @@ object SparkEnv extends Logging {
 
     val serializer = instantiateClassFromConf[Serializer](
       "spark.serializer", "org.apache.spark.serializer.JavaSerializer")
-    logDebug(s"Using serializer: ${serializer.getClass}")
+    debug(s"Using serializer: ${serializer.getClass}")
 
     val serializerManager = new SerializerManager(serializer, conf, ioEncryptionKey)
 
@@ -343,7 +343,7 @@ object SparkEnv extends Logging {
       name: String, endpointCreator: => RpcEndpoint):
     RpcEndpointRef = {
     if (isDriver) {
-      logInfo("Registering " + name)
+      info("Registering " + name)
       rpcEnv.setupEndpoint(name, endpointCreator)
     } else {
       RpcUtils.makeDriverRef(name, conf, rpcEnv)
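An `AbstractMethodError` like the one in the commit title is the classic symptom of a class compiled against one version of a trait running with a binary-incompatible version of it: Kyuubi ships its own copy of `SparkEnv`, and Spark 2.3.0 reworked the internals of `org.apache.spark.internal.Logging`, so the renames above (`logInfo` → `info`, `logDebug` → `debug`, `logWarning` → `warn`) move the class onto Kyuubi's own `yaooqinn.kyuubi.Logging`, which Spark upgrades cannot break. One way to spot such a mismatch, sketched with illustrative jar paths:

```bash
# Compare the Logging methods the compiled class links against with what the
# Spark jar on the runtime classpath actually provides (jar paths illustrative).
javap -cp kyuubi-server.jar org.apache.spark.SparkEnv | grep -i log
javap -cp "$SPARK_HOME/jars/spark-core_2.11-2.3.0.jar" org.apache.spark.internal.Logging
```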