[KYUUBI #7148] Fix spark.kubernetes.file.upload.path permission
### Why are the changes needed?

The default behavior of HDFS is to set the permission of a file created with `FileSystem.create` or `FileSystem.mkdirs` to `(P & ~umask)`, where `P` is the permission in the API call and the umask is a system value set by `fs.permissions.umask-mode`, defaulting to `0022`. This means that, with default settings, any `mkdirs` call can grant at most `755` permissions. The same issue was also reported in [SPARK-30860](https://issues.apache.org/jira/browse/SPARK-30860).

### How was this patch tested?

Manual test.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #7148 from pan3793/fs-mkdirs.

Closes #7148

7527060ac [Cheng Pan] fix
f64913277 [Cheng Pan] Fix spark.kubernetes.file.upload.path permission

Authored-by: Cheng Pan <chengpan@apache.org>
Signed-off-by: Cheng Pan <chengpan@apache.org>
This commit is contained in:
- parent: a3f1e51e78
- commit: 97f0bae87d
```diff
@@ -117,11 +117,11 @@ class KyuubiOnKubernetesWithClusterSparkTestsSuite
   override def beforeAll(): Unit = {
     super.beforeAll()
     val fs = FileSystem.get(getHadoopConf)
-    fs.mkdirs(
+    FileSystem.mkdirs(
+      fs,
       new Path("/spark"),
       new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
     fs.setPermission(new Path("/"), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-    fs.setPermission(new Path("/spark"), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
     fs.copyFromLocalFile(new Path(driverTemplate.getPath), new Path("/spark/driver.yml"))
   }
```
```diff
@@ -296,7 +296,8 @@ class SparkProcessBuilder(
       fs = path.getFileSystem(hadoopConf)
       if (!fs.exists(path)) {
         info(s"Try creating $KUBERNETES_FILE_UPLOAD_PATH: $uploadPath")
-        fs.mkdirs(path, KUBERNETES_UPLOAD_PATH_PERMISSION)
+        // SPARK-30860: use the class method to avoid the umask causing permission issues
+        FileSystem.mkdirs(fs, path, KUBERNETES_UPLOAD_PATH_PERMISSION)
       }
     } catch {
       case ioe: IOException =>
```
```diff
@@ -410,7 +411,8 @@ object SparkProcessBuilder {
   final val INTERNAL_RESOURCE = "spark-internal"

   final val KUBERNETES_FILE_UPLOAD_PATH = "spark.kubernetes.file.upload.path"
-  final val KUBERNETES_UPLOAD_PATH_PERMISSION = new FsPermission(Integer.parseInt("777", 8).toShort)
+  final val KUBERNETES_UPLOAD_PATH_PERMISSION =
+    FsPermission.createImmutable(Integer.parseInt("777", 8).toShort)

   final val YEAR_FMT = DateTimeFormatter.ofPattern("yyyy")
   final val MONTH_FMT = DateTimeFormatter.ofPattern("MM")
```
Loading…
Reference in New Issue
Block a user