[KYUUBI #1252] Move KyuubiConf#toSparkPrefixedConf out of kyuubi-common module

…

<!--
Thanks for sending a pull request!

Here are some tips for you:
  1. If this is your first time, please read our contributor guidelines: https://kyuubi.readthedocs.io/en/latest/community/contributions.html
  2. If the PR is related to an issue in https://github.com/apache/incubator-kyuubi/issues, add '[KYUUBI #XXXX]' in your PR title, e.g., '[KYUUBI #XXXX] Your PR title ...'.
  3. If the PR is unfinished, add '[WIP]' in your PR title, e.g., '[WIP][KYUUBI #XXXX] Your PR title ...'.
-->

### _Why are the changes needed?_
<!--
Please clarify why the changes are needed. For instance,
  1. If you add a feature, you can talk about the use case of it.
  2. If you fix a bug, you can clarify why it is a bug.
-->

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [ ] [Run test](https://kyuubi.readthedocs.io/en/latest/develop_tools/testing.html#running-tests) locally before make a pull request

Closes #1256 from yanghua/KYUUBI-1252.

Closes #1252

8a41a298 [yanghua] Address review suggestion
c78a850d [yanghua] Address review suggestion
e6aa86c2 [yanghua] Refactor code to address review suggestion
ed39d2dd [yanghua] [KYUUBI #1252] Move KyuubiConf#toSparkPrefixedConf out of kyuubi-common module

Authored-by: yanghua <yanghua1127@gmail.com>
Signed-off-by: Kent Yao <yao@apache.org>
This commit is contained in:
yanghua 2021-10-19 17:09:59 +08:00 committed by Kent Yao
parent 48f8d292d6
commit f41cd30f64
No known key found for this signature in database
GPG Key ID: F7051850A0AF904D
4 changed files with 30 additions and 32 deletions

View File

@@ -154,26 +154,6 @@ case class KyuubiConf(loadSysDefault: Boolean = true) extends Logging {
serverOnlyConfEntries.foreach(cloned.unset)
cloned
}
/**
 * Converts Kyuubi configs into configs that Spark can identify.
 * - If the key starts with `spark.`, it is kept as-is since it is already a Spark conf.
 * - If the key starts with `hadoop.`, it is prefixed with `spark.hadoop.`.
 * - Otherwise, the key is prefixed with `spark.`.
 * @return a map with Spark-recognizable configs
 */
def toSparkPrefixedConf: Map[String, String] = {
settings.entrySet().asScala.map { entry =>
val rawKey = entry.getKey
val sparkKey =
if (rawKey.startsWith("spark.")) {
rawKey
} else if (rawKey.startsWith("hadoop.")) {
"spark.hadoop." + rawKey
} else {
"spark." + rawKey
}
sparkKey -> entry.getValue
}.toMap
}
}
/**

View File

@@ -86,16 +86,6 @@ class KyuubiConfSuite extends KyuubiFunSuite {
assert(cloned.getOption(key).get === "xyz")
}
// Verifies the key-rewrite rules of toSparkPrefixedConf:
// plain keys and `kyuubi.`-prefixed keys get a `spark.` prefix, keys already
// starting with `spark.` are kept as-is, and `hadoop.`-prefixed keys are
// prefixed with `spark.hadoop.` (yielding `spark.hadoop.hadoop.kent`).
// Note: `conf` is mutated cumulatively — each `set` adds to the same map,
// and KyuubiConf(false) starts empty because system defaults are not loaded.
test("to spark prefixed conf") {
val conf = KyuubiConf(false)
assert(conf.toSparkPrefixedConf.isEmpty)
assert(conf.set("kyuubi.kent", "yao").toSparkPrefixedConf("spark.kyuubi.kent") === "yao")
assert(conf.set("spark.kent", "yao").toSparkPrefixedConf("spark.kent") === "yao")
assert(conf.set("kent", "yao").toSparkPrefixedConf("spark.kent") === "yao")
assert(conf.set("hadoop.kent", "yao").toSparkPrefixedConf("spark.hadoop.hadoop.kent") === "yao")
}
test("get user specific defaults") {
val conf = KyuubiConf().loadFileDefaults()

View File

@@ -123,10 +123,24 @@ class SparkProcessBuilder(
buffer += executable
buffer += CLASS
buffer += mainClass
conf.toSparkPrefixedConf.foreach { case (k, v) =>
/**
 * Converts Kyuubi configs into configs that Spark can identify:
 * - If the key starts with `spark.`, keep it as-is since it is already a Spark conf.
 * - If the key starts with `hadoop.`, prefix it with `spark.hadoop.`.
 * - Otherwise, prefix the key with `spark.`.
 */
conf.getAll.foreach { case (k, v) =>
val newKey = if (k.startsWith("spark.")) {
k
} else if (k.startsWith("hadoop.")) {
"spark.hadoop." + k
} else {
"spark." + k
}
// Each conf becomes a `--conf newKey=v` pair on the spark-submit command line.
buffer += CONF
buffer += s"$newKey=$v"
}
// iff the keytab is specified, PROXY_USER is not supported
if (!useKeytab()) {
buffer += PROXY_USER

View File

@@ -245,6 +245,20 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper {
val exit3 = pb2.killApplication("unknow")
assert(exit3.equals(""))
}
// Verifies that SparkProcessBuilder rewrites Kyuubi config keys for
// spark-submit: plain and `kyuubi.`-prefixed keys gain a `spark.` prefix,
// `spark.`-prefixed keys pass through unchanged, and `hadoop.`-prefixed keys
// gain `spark.hadoop.` (producing `spark.hadoop.hadoop.kent`).
test("add spark prefix for conf") {
val kyuubiConf = KyuubiConf(false)
kyuubiConf.set("kyuubi.kent", "yao")
kyuubiConf.set("spark.vino", "yang")
kyuubiConf.set("kent", "yao")
kyuubiConf.set("hadoop.kent", "yao")
val processBuilder = new SparkProcessBuilder("", kyuubiConf)
// The rendered command line is space-separated; each conf appears as k=v.
val cmdTokens = processBuilder.toString.split(' ')
Seq(
"spark.kyuubi.kent=yao",
"spark.vino=yang",
"spark.kent=yao",
"spark.hadoop.hadoop.kent=yao").foreach { expected =>
assert(cmdTokens.contains(expected))
}
}
}
class FakeSparkProcessBuilder(config: KyuubiConf)