diff --git a/tools/spark-block-cleaner/src/main/scala/org/apache/kyuubi/tools/KubernetesSparkBlockCleaner.scala b/tools/spark-block-cleaner/src/main/scala/org/apache/kyuubi/tools/KubernetesSparkBlockCleaner.scala
index 801559da9..f2c51af15 100644
--- a/tools/spark-block-cleaner/src/main/scala/org/apache/kyuubi/tools/KubernetesSparkBlockCleaner.scala
+++ b/tools/spark-block-cleaner/src/main/scala/org/apache/kyuubi/tools/KubernetesSparkBlockCleaner.scala
@@ -154,13 +154,26 @@ object KubernetesSparkBlockCleaner extends Logging {
   import scala.sys.process._
 
   private def needToDeepClean(dir: String): Boolean = {
-    val used = (s"df $dir" #| s"grep $dir").!!
-      .split(" ").filter(_.endsWith("%")) {
-        0
-      }.replace("%", "")
-    info(s"$dir now used $used% space")
-
-    used.toInt > (100 - freeSpaceThreshold)
+    // Parse the "Use%" column of df output for the filesystem backing dir.
+    // Any failure (missing mount, unexpected df output, non-numeric value)
+    // lands in the catch below and conservatively triggers a deep clean.
+    try {
+      val used = (s"df $dir" #| s"grep $dir").!!
+        .split(" ")
+        .find(_.endsWith("%"))
+        .map(_.stripSuffix("%"))
+        .getOrElse(throw new IllegalStateException(s"Unexpected df output for $dir"))
+      info(s"$dir now used $used% space")
+
+      used.toInt > (100 - freeSpaceThreshold)
+    } catch {
+      // NOTE(review): assumes scala.util.control.NonFatal is imported; the
+      // guard lets fatal errors (OOM, InterruptedException) propagate.
+      case NonFatal(e) =>
+        error(s"An error occurred when querying the disk $dir capacity, " +
+          s"return true to make sure the disk space will not overrun: ${e.getMessage}")
+        true
+    }
   }
 
   private def doCleanJob(dir: String): Unit = {