fix #14: the default spark.local.dir, which points to /tmp, is potentially dangerous

This commit is contained in:
Kent Yao 2018-05-23 17:54:08 +08:00
parent 0e6e4343df
commit 443f2d4f31
5 changed files with 68 additions and 5 deletions

1
.gitignore vendored
View File

@ -28,6 +28,7 @@ kyuubi-*-bin-*
*.gz
logs/
pid/
local/
out/
hs_err_pid*

View File

@ -113,8 +113,7 @@ object KyuubiConf {
" enabled")
.stringConf
.createWithDefault(
s"${sys.env.getOrElse("SPARK_LOG_DIR",
sys.env.getOrElse("SPARK_HOME", System.getProperty("java.io.tmpdir")))}"
s"${sys.env.getOrElse("KYUUBI_LOG_DIR", System.getProperty("java.io.tmpdir"))}"
+ File.separator + "operation_logs")
/////////////////////////////////////////////////////////////////////////////////////////////////
@ -148,7 +147,7 @@ object KyuubiConf {
.createWithDefault(TimeUnit.SECONDS.toMillis(10L))
/////////////////////////////////////////////////////////////////////////////////////////////////
// KyuubiSession //
// Kyuubi Session //
/////////////////////////////////////////////////////////////////////////////////////////////////
val FRONTEND_SESSION_CHECK_INTERVAL: ConfigEntry[Long] =
@ -277,6 +276,14 @@ object KyuubiConf {
.timeConf(TimeUnit.MILLISECONDS)
.createWithDefault(TimeUnit.MINUTES.toMillis(30L))
/**
 * Default directory for `spark.local.dir` of Kyuubi backend sessions.
 * Falls back to `java.io.tmpdir` only when KYUUBI_HOME is not set in the
 * environment; resolved once at class-initialization time.
 */
val BACKEND_SESSION_LOCAL_DIR: ConfigEntry[String] =
  KyuubiConfigBuilder("spark.kyuubi.backend.session.local.dir")
    .doc("Default value to set spark.local.dir")
    .stringConf
    .createWithDefault(
      // getOrElse already yields a String; the original s"${...}" wrapper was redundant
      sys.env.getOrElse("KYUUBI_HOME", System.getProperty("java.io.tmpdir"))
        + File.separator + "local")
/////////////////////////////////////////////////////////////////////////////////////////////////
// Authentication //
/////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -63,6 +63,8 @@ object KyuubiSparkUtil extends Logging {
val METASTORE_JARS = SPARK_PREFIX + SQL_PREFIX + HIVE_PREFIX + "metastore.jars"
val SPARK_LOCAL_DIR = SPARK_PREFIX + "local.dir"
val HIVE_VAR_PREFIX: Regex = """set:hivevar:([^=]+)""".r
val USE_DB: Regex = """use:([^=]+)""".r
val QUEUE = SPARK_PREFIX + YARN_PREFIX + "queue"
@ -121,5 +123,4 @@ object KyuubiSparkUtil extends Logging {
val sMinor = minorVersion(version)
tMajor > sMajor || (tMajor == sMajor && tMinor >= sMinor)
}
}

View File

@ -95,7 +95,7 @@ object KyuubiServer extends Logging {
* Generate proper configurations before server starts
* @param conf the default [[SparkConf]]
*/
private[this] def setupCommonConfig(conf: SparkConf): Unit = {
private[kyuubi] def setupCommonConfig(conf: SparkConf): Unit = {
// will be overwritten later for each SparkContext
conf.setAppName(classOf[KyuubiServer].getSimpleName)
// avoid max port retries reached
@ -122,6 +122,9 @@ object KyuubiServer extends Logging {
}
// Set missing Kyuubi configs to SparkConf
KyuubiConf.getAllDefaults.foreach(kv => conf.setIfMissing(kv._1, kv._2))
conf.setIfMissing(
KyuubiSparkUtil.SPARK_LOCAL_DIR, conf.get(KyuubiConf.BACKEND_SESSION_LOCAL_DIR.key))
}
private[this] def validate(): Unit = {

View File

@ -0,0 +1,51 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package yaooqinn.kyuubi.server
import org.apache.spark.{KyuubiSparkUtil, SparkConf, SparkFunSuite}
/**
 * Verifies that [[KyuubiServer.setupCommonConfig]] applies Kyuubi's common
 * defaults to a [[SparkConf]] and overwrites user-supplied values where the
 * server requires its own settings.
 */
class KyuubiServerSuite extends SparkFunSuite {

  test("testSetupCommonConfig") {
    // A fresh conf should receive all common defaults.
    val conf = new SparkConf(loadDefaults = true)
    KyuubiServer.setupCommonConfig(conf)
    val name = "spark.app.name"
    assert(conf.get(name) === "KyuubiServer")
    assert(conf.get(KyuubiSparkUtil.SPARK_UI_PORT) === KyuubiSparkUtil.SPARK_UI_PORT_DEFAULT)
    assert(conf.get(KyuubiSparkUtil.MULTIPLE_CONTEXTS) ===
      KyuubiSparkUtil.MULTIPLE_CONTEXTS_DEFAULT)
    assert(conf.get(KyuubiSparkUtil.CATALOG_IMPL) === KyuubiSparkUtil.CATALOG_IMPL_DEFAULT)
    assert(conf.get(KyuubiSparkUtil.DEPLOY_MODE) === KyuubiSparkUtil.DEPLOY_MODE_DEFAULT)
    assert(conf.get(KyuubiSparkUtil.METASTORE_JARS) !== "builtin")
    // NOTE(review): assumes KYUUBI_HOME is unset in the test environment, so the
    // local dir falls back to java.io.tmpdir — confirm this holds in CI.
    assert(conf.get(KyuubiSparkUtil.SPARK_LOCAL_DIR)
      .startsWith(System.getProperty("java.io.tmpdir")))

    // Keys that were never set must remain absent.
    val foo = "spark.foo"
    val e = intercept[NoSuchElementException](conf.get(foo))
    assert(e.getMessage === foo)

    // Pre-populated conf: app name and UI port are overwritten by the server,
    // unrelated user keys are preserved.
    val bar = "bar"
    val conf2 = new SparkConf(loadDefaults = true)
      .set(name, "test")
      .set(foo, bar)
      .set(KyuubiSparkUtil.SPARK_UI_PORT, "1234")
    KyuubiServer.setupCommonConfig(conf2)
    // bug fix: the original asserted on `conf` here, which was vacuously true
    // (conf's name was already "KyuubiServer") and never exercised conf2.
    assert(conf2.get(name) === "KyuubiServer") // app name is overwritten
    assert(conf2.get(KyuubiSparkUtil.SPARK_UI_PORT) === KyuubiSparkUtil.SPARK_UI_PORT_DEFAULT)
    assert(conf2.get(foo) === bar)
  }
}