Include publishing to BinTray in release process

After this you should be able to use the library in the shell as follows:

```
bin/spark-shell --packages com.databricks:spark-sql-perf:0.2.3
```

Author: Michael Armbrust <michael@databricks.com>

Closes #46 from marmbrus/publishToMaven.
This commit is contained in:
Michael Armbrust 2015-12-23 00:09:35 -08:00
parent b2e4896efc
commit 7825449eef
2 changed files with 32 additions and 1 deletion

View File

@@ -1,6 +1,10 @@
// Your sbt build file. Guides on how to write one can be found at
// http://www.scala-sbt.org/0.13/docs/index.html
// Maven coordinates: artifacts are published as com.databricks:spark-sql-perf.
name := "spark-sql-perf"
organization := "com.databricks"
// NOTE(review): only one Scala version pinned here; releaseCrossBuild is set
// below, which relies on crossScalaVersions — confirm it is defined elsewhere.
scalaVersion := "2.10.4"
// Package name used by the sbt-spark-package plugin for Spark Packages releases.
sparkPackageName := "databricks/spark-sql-perf"
@@ -56,6 +60,28 @@ lazy val setupDbcRelease = ReleaseStep(
}
)
/********************
 * Release settings *
 ********************/
// Publish a Maven-style POM rather than an Ivy descriptor.
publishMavenStyle := true
// Run the release's publish step once per Scala version being cross-built.
releaseCrossBuild := true
licenses += ("Apache-2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))
// Sign artifacts with PGP when the release process publishes (see sbt-pgp plugin).
releasePublishArtifactsAction := PgpKeys.publishSigned.value
// Extra POM metadata (project URL and SCM info) required by Maven-style repositories.
pomExtra := (
<url>https://github.com/databricks/spark-sql-perf</url>
<scm>
<url>git@github.com:databricks/spark-sql-perf.git</url>
<connection>scm:git:git@github.com:databricks/spark-sql-perf.git</connection>
</scm>
)
// Do not automatically release artifacts on BinTray when they are published;
// the release must be triggered explicitly.
bintrayReleaseOnPublish in ThisBuild := false
// Add publishing to spark packages as another step.
releaseProcess := Seq[ReleaseStep](
checkSnapshotDependencies,
@@ -66,6 +92,7 @@ releaseProcess := Seq[ReleaseStep](
tagRelease,
setupDbcRelease,
releaseStepTask(dbcUpload),
publishArtifacts,
setNextVersion,
commitNextVersion,
pushChanges

View File

@@ -10,4 +10,8 @@ addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.6.0")
// sbt-release: drives the versioned release workflow (releaseProcess in build.sbt).
addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.0")
// sbt-databricks: provides dbcUpload, used as a release step in build.sbt.
// (Was listed twice; the duplicate declaration has been removed.)
addSbtPlugin("com.databricks" %% "sbt-databricks" % "0.1.3")
// bintray-sbt: publishes release artifacts to BinTray
// (bintrayReleaseOnPublish is configured in build.sbt).
addSbtPlugin("me.lessis" % "bintray-sbt" % "0.3.0")
// sbt-pgp: signs artifacts via PgpKeys.publishSigned, wired into
// releasePublishArtifactsAction in build.sbt.
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")