From 2862325408783eb14dc1f9fff6871927363c4380 Mon Sep 17 00:00:00 2001 From: Kent Yao Date: Thu, 25 Oct 2018 16:53:37 +0800 Subject: [PATCH] fix #108 --- .../org/apache/spark/KyuubiSparkUtil.scala | 5 +++- .../launcher/KyuubiSubmitCommandBuilder.scala | 28 +++++++++++++++---- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/kyuubi-server/src/main/scala/org/apache/spark/KyuubiSparkUtil.scala b/kyuubi-server/src/main/scala/org/apache/spark/KyuubiSparkUtil.scala index ac909d7aa..1aaa91984 100644 --- a/kyuubi-server/src/main/scala/org/apache/spark/KyuubiSparkUtil.scala +++ b/kyuubi-server/src/main/scala/org/apache/spark/KyuubiSparkUtil.scala @@ -50,7 +50,6 @@ object KyuubiSparkUtil extends Logging { private[this] val HIVE_PREFIX = "hive." private[this] val METASTORE_PREFIX = "metastore." - // ENVIRONMENTS val SPARK_HOME: String = System.getenv("SPARK_HOME") val SPARK_JARS_DIR: String = SPARK_HOME + File.separator + "jars" @@ -68,6 +67,9 @@ object KyuubiSparkUtil extends Logging { val DRIVER_CORES: String = SPARK_PREFIX + DRIVER_PREFIX + "cores" val DRIVER_EXTRA_JAVA_OPTIONS: String = SPARK_PREFIX + DRIVER_PREFIX + "extraJavaOptions" + val GC_INTERVAL: String = SPARK_PREFIX + "cleaner.periodicGC.interval" + val GC_INTERVAL_DEFAULT: String = "3min" + val AM_EXTRA_JAVA_OPTIONS: String = AM_PREFIX + "extraJavaOptions" val SPARK_UI_PORT: String = SPARK_PREFIX + UI_PREFIX + "port" @@ -271,6 +273,7 @@ object KyuubiSparkUtil extends Logging { KyuubiConf.getAllDefaults.foreach(kv => conf.setIfMissing(kv._1, kv._2)) conf.setIfMissing(SPARK_LOCAL_DIR, conf.get(KyuubiConf.BACKEND_SESSION_LOCAL_DIR.key)) + conf.setIfMissing(GC_INTERVAL, GC_INTERVAL_DEFAULT) if (UserGroupInformation.isSecurityEnabled) { conf.setIfMissing(HDFS_CLIENT_CACHE, HDFS_CLIENT_CACHE_DEFAULT) diff --git a/kyuubi-server/src/main/scala/org/apache/spark/launcher/KyuubiSubmitCommandBuilder.scala b/kyuubi-server/src/main/scala/org/apache/spark/launcher/KyuubiSubmitCommandBuilder.scala index 
0e13e494c..7b472fbb5 100644 --- a/kyuubi-server/src/main/scala/org/apache/spark/launcher/KyuubiSubmitCommandBuilder.scala +++ b/kyuubi-server/src/main/scala/org/apache/spark/launcher/KyuubiSubmitCommandBuilder.scala @@ -50,14 +50,12 @@ class KyuubiSubmitCommandBuilder(args: JList[String]) extends SparkSubmitCommand throw new IllegalArgumentException(msg) } - val memory = firstNonEmpty( - config.get(SparkLauncher.DRIVER_MEMORY), - System.getenv("SPARK_DRIVER_MEMORY"), - DEFAULT_MEM) + val memory = firstNonEmpty(config.get(SparkLauncher.DRIVER_MEMORY), "4g") cmd.add("-Xmx" + memory) addOptionString(cmd, driverExtraJavaOptions) mergeEnvPathList(env, getLibPathEnvName, config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH)) addPermSize(cmd) + addMaxDirectMemSize(cmd) cmd.add("org.apache.spark.deploy.KyuubiSubmit") cmd.addAll(buildSparkSubmitArgs) cmd @@ -80,6 +78,26 @@ class KyuubiSubmitCommandBuilder(args: JList[String]) extends SparkSubmitCommand return } } - cmd.add("-XX:MaxPermSize=256m") + cmd.add("-XX:MaxPermSize=512m") + + cmd.asScala.foreach { arg => + if (arg.contains("-XX:PermSize=")) { + return + } + } + cmd.add("-XX:PermSize=512m") + } + + /** + * Adds max direct memory size option for Spark if the VM requires it and the user hasn't + * set it. + */ + private def addMaxDirectMemSize(cmd: JList[String]): Unit = { + cmd.asScala.foreach { arg => + if (arg.contains("-XX:MaxDirectMemorySize=")) { + return + } + } + cmd.add("-XX:MaxDirectMemorySize=4096m") } }