diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala index 99ea9d567..0d04e84aa 100644 --- a/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala +++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala @@ -154,26 +154,6 @@ case class KyuubiConf(loadSysDefault: Boolean = true) extends Logging { serverOnlyConfEntries.foreach(cloned.unset) cloned } - - /** - * This method is used to convert kyuubi configs to configs that Spark could identify. - * - If the key is start with `spark.`, keep it AS IS as it is a Spark Conf - * - If the key is start with `hadoop.`, it will be prefixed with `spark.hadoop.` - * - Otherwise, the key will be added a `spark.` prefix - * @return a map with spark specified configs - */ - def toSparkPrefixedConf: Map[String, String] = { - settings.entrySet().asScala.map { e => - val key = e.getKey - if (key.startsWith("spark.")) { - key -> e.getValue - } else if (key.startsWith("hadoop.")) { - "spark.hadoop." + key -> e.getValue - } else { - "spark." 
+ key -> e.getValue - } - }.toMap - } } /** diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/config/KyuubiConfSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/config/KyuubiConfSuite.scala index fb1bd16c1..fe23031d2 100644 --- a/kyuubi-common/src/test/scala/org/apache/kyuubi/config/KyuubiConfSuite.scala +++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/config/KyuubiConfSuite.scala @@ -86,16 +86,6 @@ class KyuubiConfSuite extends KyuubiFunSuite { assert(cloned.getOption(key).get === "xyz") } - test("to spark prefixed conf") { - val conf = KyuubiConf(false) - assert(conf.toSparkPrefixedConf.isEmpty) - assert(conf.set("kyuubi.kent", "yao").toSparkPrefixedConf("spark.kyuubi.kent") === "yao") - assert(conf.set("spark.kent", "yao").toSparkPrefixedConf("spark.kent") === "yao") - assert(conf.set("kent", "yao").toSparkPrefixedConf("spark.kent") === "yao") - assert(conf.set("hadoop.kent", "yao").toSparkPrefixedConf("spark.hadoop.hadoop.kent") === "yao") - } - - test("get user specific defaults") { val conf = KyuubiConf().loadFileDefaults() diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala index 4d4a45453..f85cfe376 100644 --- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala +++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala @@ -123,10 +123,24 @@ class SparkProcessBuilder( buffer += executable buffer += CLASS buffer += mainClass - conf.toSparkPrefixedConf.foreach { case (k, v) => + /** + * Converts kyuubi configs to configs that Spark could identify. 
+ * - If the key starts with `spark.`, keep it AS IS as it is a Spark Conf + * - If the key starts with `hadoop.`, it will be prefixed with `spark.hadoop.` + * - Otherwise, the key will be prefixed with `spark.` + */ + conf.getAll.foreach { case (k, v) => + val newKey = if (k.startsWith("spark.")) { + k + } else if (k.startsWith("hadoop.")) { + "spark.hadoop." + k + } else { + "spark." + k + } buffer += CONF - buffer += s"$k=$v" + buffer += s"$newKey=$v" } + // iff the keytab is specified, PROXY_USER is not supported if (!useKeytab()) { buffer += PROXY_USER diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala index e46407ac6..9d9db4858 100644 --- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala +++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala @@ -245,6 +245,20 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper { val exit3 = pb2.killApplication("unknow") assert(exit3.equals("")) } + + test("add spark prefix for conf") { + val conf = KyuubiConf(false) + conf.set("kyuubi.kent", "yao") + conf.set("spark.vino", "yang") + conf.set("kent", "yao") + conf.set("hadoop.kent", "yao") + val builder = new SparkProcessBuilder("", conf) + val commands = builder.toString.split(' ') + assert(commands.contains("spark.kyuubi.kent=yao")) + assert(commands.contains("spark.vino=yang")) + assert(commands.contains("spark.kent=yao")) + assert(commands.contains("spark.hadoop.hadoop.kent=yao")) + } } class FakeSparkProcessBuilder(config: KyuubiConf)