[KYUUBI #5856] Bump Spark 3.4.2
# 🔍 Description

## Issue References 🔗

This pull request fixes #5856

## Describe Your Solution 🔧

## Types of changes 🔖

- [ ] Bugfix (non-breaking change which fixes an issue)
- [x] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)

## Test Plan 🧪

#### Behavior Without This Pull Request ⚰️

#### Behavior With This Pull Request 🎉

#### Related Unit Tests

---

# Checklists

## 📝 Author Self Checklist

- [x] My code follows the [style guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html) of this project
- [x] I have performed a self-review
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [x] My changes generate no new warnings
- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] New and existing unit tests pass locally with my changes
- [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)

## 📝 Committer Pre-Merge Checklist

- [x] Pull request title is okay.
- [x] No license issues.
- [x] Milestone correctly set?
- [x] Test coverage is ok
- [x] Assignees are selected.
- [x] Minimum number of approvals
- [x] No changes are requested

**Be nice. Be informative.**

Closes #5859 from zml1206/KYUUBI-5856.

Closes #5856

872fd06d2 [zml1206] Revert changes in SparkProcessBuilderSuite
bc4996f90 [zml1206] Bump spark 3.4.2

Authored-by: zml1206 <zhuml1206@gmail.com>
Signed-off-by: Cheng Pan <chengpan@apache.org>
This commit is contained in:
parent
667b5ab6b8
commit
b3d33cabfa
6
.github/workflows/master.yml
vendored
6
.github/workflows/master.yml
vendored
@@ -410,7 +410,7 @@ jobs:
|
||||
with:
|
||||
# passthrough CI into build container
|
||||
build-args: |
|
||||
CI=${CI}
|
||||
CI=${CI}
|
||||
MVN_ARG=--flink-provided --hive-provided -Dmaven.javadoc.skip=true -Drat.skip=true -Dscalastyle.skip=true -Dspotless.check.skip -DskipTests
|
||||
context: .
|
||||
file: build/Dockerfile
|
||||
@@ -427,8 +427,8 @@ jobs:
|
||||
# https://minikube.sigs.k8s.io/docs/handbook/pushing/#7-loading-directly-to-in-cluster-container-runtime
|
||||
minikube image load apache/kyuubi:latest
|
||||
# pre-install spark into minikube
|
||||
docker pull apache/spark:3.4.1
|
||||
minikube image load apache/spark:3.4.1
|
||||
docker pull apache/spark:3.4.2
|
||||
minikube image load apache/spark:3.4.2
|
||||
- name: kubectl pre-check
|
||||
run: |
|
||||
kubectl get nodes
|
||||
|
||||
@@ -24,7 +24,7 @@ KYUUBI_HADOOP_VERSION=3.3.6
|
||||
POSTGRES_VERSION=12
|
||||
POSTGRES_JDBC_VERSION=42.3.4
|
||||
SCALA_BINARY_VERSION=2.12
|
||||
SPARK_VERSION=3.4.1
|
||||
SPARK_VERSION=3.4.2
|
||||
SPARK_BINARY_VERSION=3.4
|
||||
SPARK_HADOOP_VERSION=3.3.4
|
||||
ZOOKEEPER_VERSION=3.6.3
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
|
||||
<properties>
|
||||
<gluten.version>1.1.0-SNAPSHOT</gluten.version>
|
||||
<spark.version>3.4.1</spark.version>
|
||||
<spark.version>3.4.2</spark.version>
|
||||
<spark.binary.version>3.4</spark.binary.version>
|
||||
</properties>
|
||||
|
||||
@@ -93,7 +93,7 @@
|
||||
<id>gluten-spark-3.4</id>
|
||||
<properties>
|
||||
<maven.plugin.scalatest.include.tags>org.apache.kyuubi.tags.GlutenTest</maven.plugin.scalatest.include.tags>
|
||||
<spark.version>3.4.1</spark.version>
|
||||
<spark.version>3.4.2</spark.version>
|
||||
<spark.binary.version>3.4</spark.binary.version>
|
||||
</properties>
|
||||
<dependencies>
|
||||
|
||||
@@ -55,7 +55,7 @@ class KyuubiOnKubernetesWithSparkTestsBase extends WithKyuubiServerOnKubernetes
|
||||
Map(
|
||||
"spark.master" -> s"k8s://$miniKubeApiMaster",
|
||||
// We should update spark docker image in ./github/workflows/master.yml at the same time
|
||||
"spark.kubernetes.container.image" -> "apache/spark:3.4.1",
|
||||
"spark.kubernetes.container.image" -> "apache/spark:3.4.2",
|
||||
"spark.kubernetes.container.image.pullPolicy" -> "IfNotPresent",
|
||||
"spark.executor.memory" -> "512M",
|
||||
"spark.driver.memory" -> "1024M",
|
||||
|
||||
@@ -50,7 +50,7 @@ abstract class SparkOnKubernetesSuiteBase
|
||||
// TODO Support more Spark version
|
||||
// Spark official docker image: https://hub.docker.com/r/apache/spark/tags
|
||||
KyuubiConf().set("spark.master", s"k8s://$apiServerAddress")
|
||||
.set("spark.kubernetes.container.image", "apache/spark:3.4.1")
|
||||
.set("spark.kubernetes.container.image", "apache/spark:3.4.2")
|
||||
.set("spark.kubernetes.container.image.pullPolicy", "IfNotPresent")
|
||||
.set("spark.executor.instances", "1")
|
||||
.set("spark.executor.memory", "512M")
|
||||
|
||||
4
pom.xml
4
pom.xml
@@ -200,7 +200,7 @@
|
||||
DO NOT forget to change the following properties when change the minor version of Spark:
|
||||
`delta.version`, `maven.plugin.scalatest.exclude.tags`
|
||||
-->
|
||||
<spark.version>3.4.1</spark.version>
|
||||
<spark.version>3.4.2</spark.version>
|
||||
<spark.binary.version>3.4</spark.binary.version>
|
||||
<spark.archive.scala.suffix></spark.archive.scala.suffix>
|
||||
<spark.archive.name>spark-${spark.version}-bin-hadoop3${spark.archive.scala.suffix}.tgz</spark.archive.name>
|
||||
@@ -2283,7 +2283,7 @@
|
||||
<properties>
|
||||
<delta.artifact>delta-core</delta.artifact>
|
||||
<delta.version>2.4.0</delta.version>
|
||||
<spark.version>3.4.1</spark.version>
|
||||
<spark.version>3.4.2</spark.version>
|
||||
<spark.binary.version>3.4</spark.binary.version>
|
||||
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow</maven.plugin.scalatest.exclude.tags>
|
||||
</properties>
|
||||
|
||||
Loading…
Reference in New Issue
Block a user