From 479e4081c206b5df1a55a4d99b2d07fcce40fa43 Mon Sep 17 00:00:00 2001
From: Michael Armbrust
Date: Wed, 9 Sep 2015 22:32:31 -0700
Subject: [PATCH] Add a release process for pushing to DBC

---
 build.sbt           | 48 ++++++++++++++++++++++++++++++++++++++++++---
 project/plugins.sbt |  4 ++++
 2 files changed, 49 insertions(+), 3 deletions(-)

diff --git a/build.sbt b/build.sbt
index 5e7f20c..dcbc6dd 100644
--- a/build.sbt
+++ b/build.sbt
@@ -5,8 +5,6 @@ scalaVersion := "2.10.4"
 
 sparkPackageName := "databricks/spark-sql-perf"
 
-version := "0.1.2-SNAPSHOT"
-
 // All Spark Packages need a license
 licenses := Seq("Apache-2.0" -> url("http://opensource.org/licenses/Apache-2.0"))
 
@@ -23,4 +21,48 @@ initialCommands in console :=
 
 libraryDependencies += "com.twitter" %% "util-jvm" % "6.23.0" % "provided"
 
-libraryDependencies += "org.scalatest" %% "scalatest" % "2.2.1" % "test"
\ No newline at end of file
+libraryDependencies += "org.scalatest" %% "scalatest" % "2.2.1" % "test"
+
+// Your username to login to Databricks Cloud
+dbcUsername := sys.env.getOrElse("DBC_USERNAME", sys.error("Please set DBC_USERNAME"))
+
+// Your password (Can be set as an environment variable)
+dbcPassword := sys.env.getOrElse("DBC_PASSWORD", sys.error("Please set DBC_PASSWORD"))
+
+// The URL to the Databricks Cloud DB Api. Don't forget to set the port number to 34563!
+dbcApiUrl := sys.env.getOrElse ("DBC_URL", sys.error("Please set DBC_URL"))
+
+// Add any clusters that you would like to deploy your work to. e.g. "My Cluster"
+// or run dbcExecuteCommand
+dbcClusters += sys.env.getOrElse("DBC_USERNAME", sys.error("Please set DBC_USERNAME"))
+
+dbcLibraryPath := s"/Users/${sys.env.getOrElse("DBC_USERNAME", sys.error("Please set DBC_USERNAME"))}/lib"
+
+import ReleaseTransformations._
+
+/** Push to the team directory instead of the user's homedir for releases. */
+lazy val setupDbcRelease = ReleaseStep(
+  action = { st: State =>
+    val extracted = Project.extract(st)
+    val newSettings = extracted.structure.allProjectRefs.map { ref =>
+      dbcLibraryPath in ref := "/databricks/spark/sql/lib"
+    }
+
+    reapply(newSettings, st)
+  }
+)
+
+// Add publishing to spark packages as another step.
+releaseProcess := Seq[ReleaseStep](
+  checkSnapshotDependencies,
+  inquireVersions,
+  runTest,
+  setReleaseVersion,
+  commitReleaseVersion,
+  tagRelease,
+  setupDbcRelease,
+  releaseStepTask(dbcDeploy),
+  setNextVersion,
+  commitNextVersion,
+  pushChanges
+)
\ No newline at end of file
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 0d743f1..b6931a0 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -7,3 +7,7 @@ resolvers += "sonatype-releases" at "https://oss.sonatype.org/content/repositori
 addSbtPlugin("org.spark-packages" %% "sbt-spark-package" % "0.1.1")
 
 addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.6.0")
+
+addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.0")
+
+addSbtPlugin("com.databricks" %% "sbt-databricks" % "0.1.2")
\ No newline at end of file