Merge pull request #112 from yaooqinn/KYUUBI-108
Fix #108: Kyuubi server-side direct memory OOM
This commit is contained in:
commit
8bbc3d53ac
@ -50,7 +50,6 @@ object KyuubiSparkUtil extends Logging {
|
||||
private[this] val HIVE_PREFIX = "hive."
|
||||
private[this] val METASTORE_PREFIX = "metastore."
|
||||
|
||||
|
||||
// ENVIRONMENTS
|
||||
val SPARK_HOME: String = System.getenv("SPARK_HOME")
|
||||
val SPARK_JARS_DIR: String = SPARK_HOME + File.separator + "jars"
|
||||
@ -68,6 +67,9 @@ object KyuubiSparkUtil extends Logging {
|
||||
val DRIVER_CORES: String = SPARK_PREFIX + DRIVER_PREFIX + "cores"
|
||||
val DRIVER_EXTRA_JAVA_OPTIONS: String = SPARK_PREFIX + DRIVER_PREFIX + "extraJavaOptions"
|
||||
|
||||
val GC_INTERVAL: String = SPARK_PREFIX + "cleaner.periodicGC.interval"
|
||||
val GC_INTERVAL_DEFAULT: String = "3min"
|
||||
|
||||
val AM_EXTRA_JAVA_OPTIONS: String = AM_PREFIX + "extraJavaOptions"
|
||||
|
||||
val SPARK_UI_PORT: String = SPARK_PREFIX + UI_PREFIX + "port"
|
||||
@ -271,6 +273,7 @@ object KyuubiSparkUtil extends Logging {
|
||||
KyuubiConf.getAllDefaults.foreach(kv => conf.setIfMissing(kv._1, kv._2))
|
||||
|
||||
conf.setIfMissing(SPARK_LOCAL_DIR, conf.get(KyuubiConf.BACKEND_SESSION_LOCAL_DIR.key))
|
||||
conf.setIfMissing(GC_INTERVAL, GC_INTERVAL_DEFAULT)
|
||||
|
||||
if (UserGroupInformation.isSecurityEnabled) {
|
||||
conf.setIfMissing(HDFS_CLIENT_CACHE, HDFS_CLIENT_CACHE_DEFAULT)
|
||||
|
||||
@ -50,14 +50,12 @@ class KyuubiSubmitCommandBuilder(args: JList[String]) extends SparkSubmitCommand
|
||||
throw new IllegalArgumentException(msg)
|
||||
}
|
||||
|
||||
val memory = firstNonEmpty(
|
||||
config.get(SparkLauncher.DRIVER_MEMORY),
|
||||
System.getenv("SPARK_DRIVER_MEMORY"),
|
||||
DEFAULT_MEM)
|
||||
val memory = firstNonEmpty(config.get(SparkLauncher.DRIVER_MEMORY), DEFAULT_MEM * 4)
|
||||
cmd.add("-Xmx" + memory)
|
||||
addOptionString(cmd, driverExtraJavaOptions)
|
||||
mergeEnvPathList(env, getLibPathEnvName, config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH))
|
||||
addPermSize(cmd)
|
||||
addMaxDirectMemSize(cmd)
|
||||
cmd.add("org.apache.spark.deploy.KyuubiSubmit")
|
||||
cmd.addAll(buildSparkSubmitArgs)
|
||||
cmd
|
||||
@ -80,6 +78,26 @@ class KyuubiSubmitCommandBuilder(args: JList[String]) extends SparkSubmitCommand
|
||||
return
|
||||
}
|
||||
}
|
||||
cmd.add("-XX:MaxPermSize=256m")
|
||||
cmd.add("-XX:MaxPermSize=512m")
|
||||
|
||||
cmd.asScala.foreach { arg =>
|
||||
if (arg.contains("-XX:PermSize=")) {
|
||||
return
|
||||
}
|
||||
}
|
||||
cmd.add("-XX:PermSize=512m")
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds max direct memory size option for Spark if the VM requires it and the user hasn't
|
||||
* set it.
|
||||
*/
|
||||
/**
 * Adds a max direct memory size option for the Kyuubi server JVM if the user has not
 * already set one, bounding off-heap (direct buffer) allocations to prevent the
 * direct-memory OOM this change targets.
 *
 * @param cmd the JVM command-line argument list being assembled; mutated in place.
 */
private def addMaxDirectMemSize(cmd: JList[String]): Unit = {
  // Respect a user-supplied value: only append our default when no
  // -XX:MaxDirectMemorySize= flag is already present. Using `exists` avoids the
  // nonlocal `return` inside a lambda the original used (which is implemented by
  // throwing NonLocalReturnControl).
  val userSet = cmd.asScala.exists(_.contains("-XX:MaxDirectMemorySize="))
  if (!userSet) {
    // BUG FIX: the original added "-XX:MaxPermSize=4096m" here, which neither matches
    // the guard above nor limits direct memory (and MaxPermSize is unsupported on
    // JDK 8+). The correct flag is -XX:MaxDirectMemorySize.
    cmd.add("-XX:MaxDirectMemorySize=4096m")
  }
}
|
||||
}
|
||||
|
||||
Loading…
Reference in New Issue
Block a user