Bump up Spark to 3.0.0 from 3.0.0-preview2

This commit is contained in:
Kent Yao 2020-06-18 17:41:43 +08:00
parent 7fb0b1a32f
commit c2271ac05b
6 changed files with 104 additions and 39 deletions

View File

@@ -61,6 +61,16 @@
<groupId>org.apache.curator</groupId>
<artifactId>curator-test</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
</exclusion>
<exclusion>
<groupId>jline</groupId>
<artifactId>jline</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -117,15 +127,85 @@
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.htrace</groupId>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
</exclusion>
<exclusion>
<groupId>xmlenc</groupId>
<artifactId>xmlenc</artifactId>
</exclusion>
<exclusion>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
</exclusion>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
</exclusion>
<exclusion>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</exclusion>
<exclusion>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</exclusion>
<exclusion>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</exclusion>
<exclusion>
<groupId>commons-net</groupId>
<artifactId>commons-net</artifactId>
</exclusion>
<exclusion>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</exclusion>
<exclusion>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</exclusion>
<exclusion>
<groupId>com.jcraft</groupId>
<artifactId>jsch</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.30</version>
</dependency>
</dependencies>
</project>

View File

@@ -126,7 +126,7 @@ class ServiceDiscovery private (
if (serviceNode != null) {
try {
serviceNode.close()
warn(s"This Kyuubi instance ${instance} is now de-registered from ZooKeeper. " +
warn(s"This Kyuubi instance $instance is now de-registered from ZooKeeper. " +
"The server will be shut down after the last client session completes.")
} catch {
case e: IOException =>

View File

@@ -36,7 +36,7 @@ object KyuubiServer {
class KyuubiServer(name: String) extends CompositeService(name) {
def this() = this(classOf[KyuubiServer].getName)
def this() = this(classOf[KyuubiServer].getSimpleName)
override def initialize(conf: KyuubiConf): Unit = {
this.conf = conf

View File

@@ -47,20 +47,36 @@
<dependency>
<groupId>${spark.group}</groupId>
<artifactId>spark-yarn_${scala.binary.version}</artifactId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>2.4.5</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>${spark.group}</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<version>2.4.5</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>${spark.group}</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<version>2.4.5</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>${spark.group}</groupId>
<artifactId>spark-yarn_${scala.binary.version}</artifactId>
<version>2.4.5</version>
<scope>${spark.scope}</scope>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>

View File

@@ -51,7 +51,8 @@
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
<version>3.0.0-preview2</version>
<version>3.0.0</version>
<scope>provided</scope>
</dependency>
</dependencies>
@@ -72,6 +73,7 @@
<shadedArtifactAttached>false</shadedArtifactAttached>
<artifactSet>
<includes>
<!-- -->
<include>org.apache.kyuubi:kyuubi-common</include>
<include>org.apache.kyuubi:kyuubi-ha</include>
</includes>

35
pom.xml
View File

@@ -64,7 +64,7 @@
<spark.group>org.apache.spark</spark.group>
<spark.version>2.4.3</spark.version>
<spark.scope>provided</spark.scope>
<hadoop.version>2.6.5</hadoop.version>
<hadoop.version>2.7.4</hadoop.version>
<hadoop.deps.scope>provided</hadoop.deps.scope>
<hive.group>org.spark-project.hive</hive.group>
<hive.version>1.2.1.spark2</hive.version>
@@ -121,39 +121,6 @@
<dependencyManagement>
<dependencies>
<dependency>
<groupId>${spark.group}</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>${spark.group}</groupId>
<artifactId>spark-yarn_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.scope}</scope>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${spark.group}</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>${spark.group}</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>