Update to Spark 1.6

Some internal interfaces changed, so we need to bump the Spark version to run tests on Spark 1.6.

Author: Michael Armbrust <michael@databricks.com>

Closes #29 from marmbrus/spark16.
This commit is contained in:
Michael Armbrust 2015-11-13 12:40:00 -08:00
parent 50808c436b
commit 344b31ed69
2 changed files with 3 additions and 3 deletions

View File

@@ -8,7 +8,7 @@ sparkPackageName := "databricks/spark-sql-perf"
// All Spark Packages need a license
licenses := Seq("Apache-2.0" -> url("http://opensource.org/licenses/Apache-2.0"))
-sparkVersion := "1.4.1"
+sparkVersion := "1.6.0-SNAPSHOT"
sparkComponents ++= Seq("sql", "hive")

View File

@@ -137,7 +137,7 @@ abstract class Benchmark(
val resultsFuture = Future {
queriesToRun.flatMap { query =>
query.newDataFrame().queryExecution.logical.collect {
-case UnresolvedRelation(Seq(name), _) => name
+case UnresolvedRelation(t, _) => t.table
}
}.distinct.foreach { name =>
try {
@@ -432,7 +432,7 @@ abstract class Benchmark(
lazy val tablesInvolved = buildDataFrame.queryExecution.logical collect {
case UnresolvedRelation(tableIdentifier, _) => {
// We are ignoring the database name.
-tableIdentifier.last
+tableIdentifier.table
}
}