diff --git a/build.sbt b/build.sbt
index 332bc7c..cd292a5 100644
--- a/build.sbt
+++ b/build.sbt
@@ -5,7 +5,7 @@ name := "spark-sql-perf"
 
 organization := "com.databricks"
 
-scalaVersion := "2.10.6"
+scalaVersion := "2.11.8"
 
 crossScalaVersions := Seq("2.10.6", "2.11.8")
 
diff --git a/src/main/scala/com/databricks/spark/sql/perf/Benchmarkable.scala b/src/main/scala/com/databricks/spark/sql/perf/Benchmarkable.scala
index f084da8..e5a2996 100644
--- a/src/main/scala/com/databricks/spark/sql/perf/Benchmarkable.scala
+++ b/src/main/scala/com/databricks/spark/sql/perf/Benchmarkable.scala
@@ -58,10 +58,12 @@ trait Benchmarkable extends Logging {
   private def afterBenchmark(sc: SparkContext): Unit = {
     // Best-effort clean up of weakly referenced RDDs, shuffles, and broadcasts
     System.gc()
-    // Remove any leftover blocks that still exist
-    sc.getExecutorStorageStatus
-      .flatMap { status => status.blocks.map { case (bid, _) => bid } }
-      .foreach { bid => SparkEnv.get.blockManager.master.removeBlock(bid) }
+    if (sparkContext.getConf.getBoolean("spark.databricks.benchmark.cleanBlocksAfter", true)) {
+      // Remove any leftover blocks that still exist
+      sc.getExecutorStorageStatus
+        .flatMap { status => status.blocks.map { case (bid, _) => bid } }
+        .foreach { bid => SparkEnv.get.blockManager.master.removeBlock(bid) }
+    }
   }
 
   private def runBenchmarkForked(
diff --git a/version.sbt b/version.sbt
index 982bb14..5f7b5ef 100644
--- a/version.sbt
+++ b/version.sbt
@@ -1 +1 @@
-version in ThisBuild := "0.4.11-SNAPSHOT"
+version in ThisBuild := "0.4.12-SNAPSHOT"