diff --git a/.travis.yml b/.travis.yml index f3e3c91..6e29baf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,3 +3,5 @@ sudo: false cache: directories: - $HOME/.ivy2 +env: + - DBC_USERNAME="" DBC_PASSWORD="" DBC_URL="" \ No newline at end of file diff --git a/build.sbt b/build.sbt index 5e7f20c..82b4477 100644 --- a/build.sbt +++ b/build.sbt @@ -5,8 +5,6 @@ scalaVersion := "2.10.4" sparkPackageName := "databricks/spark-sql-perf" -version := "0.1.2-SNAPSHOT" - // All Spark Packages need a license licenses := Seq("Apache-2.0" -> url("http://opensource.org/licenses/Apache-2.0")) @@ -23,4 +21,48 @@ initialCommands in console := libraryDependencies += "com.twitter" %% "util-jvm" % "6.23.0" % "provided" -libraryDependencies += "org.scalatest" %% "scalatest" % "2.2.1" % "test" \ No newline at end of file +libraryDependencies += "org.scalatest" %% "scalatest" % "2.2.1" % "test" + +// Your username to login to Databricks Cloud +dbcUsername := sys.env.getOrElse("DBC_USERNAME", sys.error("Please set DBC_USERNAME")) + +// Your password (Can be set as an environment variable) +dbcPassword := sys.env.getOrElse("DBC_PASSWORD", sys.error("Please set DBC_PASSWORD")) + +// The URL to the Databricks Cloud DB API. Don't forget to set the port number to 34563! +dbcApiUrl := sys.env.getOrElse("DBC_URL", sys.error("Please set DBC_URL")) + +// Add any clusters that you would like to deploy your work to. e.g. "My Cluster" +// or run dbcExecuteCommand +dbcClusters += sys.env.getOrElse("DBC_USERNAME", sys.error("Please set DBC_USERNAME")) + +dbcLibraryPath := s"/Users/${sys.env.getOrElse("DBC_USERNAME", sys.error("Please set DBC_USERNAME"))}/lib" + +import ReleaseTransformations._ + +/** Push to the team directory instead of the user's homedir for releases. 
*/ +lazy val setupDbcRelease = ReleaseStep( + action = { st: State => + val extracted = Project.extract(st) + val newSettings = extracted.structure.allProjectRefs.map { ref => + dbcLibraryPath in ref := "/databricks/spark/sql/lib" + } + + reapply(newSettings, st) + } +) + +// Add publishing to spark packages as another step. +releaseProcess := Seq[ReleaseStep]( + checkSnapshotDependencies, + inquireVersions, + runTest, + setReleaseVersion, + commitReleaseVersion, + tagRelease, + setupDbcRelease, + releaseStepTask(dbcUpload), + setNextVersion, + commitNextVersion, + pushChanges +) diff --git a/project/plugins.sbt b/project/plugins.sbt index 0d743f1..b6931a0 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -7,3 +7,7 @@ resolvers += "sonatype-releases" at "https://oss.sonatype.org/content/repositori addSbtPlugin("org.spark-packages" %% "sbt-spark-package" % "0.1.1") addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.6.0") + +addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.0") + +addSbtPlugin("com.databricks" %% "sbt-databricks" % "0.1.2") \ No newline at end of file diff --git a/version.sbt b/version.sbt new file mode 100644 index 0000000..16b90da --- /dev/null +++ b/version.sbt @@ -0,0 +1 @@ +version in ThisBuild := "0.1.3-SNAPSHOT" \ No newline at end of file