Add profile for Spark 3.2/3.3 (#380)

This commit is contained in:
Cheng Pan 2022-08-17 22:27:43 +08:00 committed by GitHub
parent e1ece6123d
commit 9b6ec58e2a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 66 additions and 6 deletions

View File

@ -21,6 +21,8 @@ jobs:
- '2.4'
- '3.0'
- '3.1'
- '3.2'
- '3.3'
steps:
- uses: actions/checkout@v2
- name: Setup JDK ${{ matrix.java }}

View File

@ -44,12 +44,12 @@ RSS Worker's slot count is decided by `rss.worker.numSlots` or `rss.worker.flush.
RSS worker's slot count decreases when a partition is allocated and increments when a partition is freed.
## Build
RSS supports Spark 2.4/3.0/3.1 and only tested under Java 8.
RSS supports Spark 2.4/3.0/3.1/3.2/3.3 and is only tested under Java 8.
Build for Spark
`
./dev/make-distribution.sh -Pspark-2.4/-Pspark-3.0/-Pspark-3.1 -Plog4j-1/-Plog4j-2
`
```
./dev/make-distribution.sh -Pspark-2.4/-Pspark-3.0/-Pspark-3.1/-Pspark-3.2/-Pspark-3.3 -Plog4j-1/-Plog4j-2
```
package rss-${project.version}-bin-release.tgz will be generated.
@ -67,9 +67,9 @@ Build procedure will create a compressed package.
### Compatibility
RSS server is compatible with all supported Spark versions.
You can run different Spark versions with the same RSS server. It doesn't matter whether RSS server is compiled with -Pspark-2.4/3.0/3.1.
You can run different Spark versions with the same RSS server. It doesn't matter whether RSS server is compiled with -Pspark-2.4/3.0/3.1/3.2/3.3.
However, RSS client must be consistent with the version of the Spark.
For example, if you are running Spark 2.4, you must compile RSS client with -Pspark-2.4; if you are running Spark 3.0, you must compile RSS client with -Pspark-3.0.
For example, if you are running Spark 2.4, you must compile RSS client with -Pspark-2.4; if you are running Spark 3.2, you must compile RSS client with -Pspark-3.2.
## Usage
RSS supports HA mode deployment.

5
dev/check-spark-3.2.sh Executable file
View File

@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Run the project check script against the Spark 3.2 / Log4j 1 profile pair.

# Resolve the repository root from this script's own location.
# Use $(...) instead of nested backticks, and `&&` so a failed `cd`
# does not silently fall back to the caller's working directory.
RSS_HOME="$(cd "$(dirname "$0")/.." && pwd)"

# Quote the expansion so the check still works from a checkout whose
# path contains spaces.
"$RSS_HOME/dev/check.sh" -Pspark-3.2 -Plog4j-1

5
dev/check-spark-3.3.sh Executable file
View File

@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Run the project check script against the Spark 3.3 / Log4j 2 profile pair.

# Resolve the repository root from this script's own location.
# Use $(...) instead of nested backticks, and `&&` so a failed `cd`
# does not silently fall back to the caller's working directory.
RSS_HOME="$(cd "$(dirname "$0")/.." && pwd)"

# Quote the expansion so the check still works from a checkout whose
# path contains spaces.
"$RSS_HOME/dev/check.sh" -Pspark-3.3 -Plog4j-2

48
pom.xml
View File

@ -630,6 +630,54 @@
</dependencies>
</profile>
<!-- Maven profile for building against Spark 3.2 (activated with -Pspark-3.2). -->
<profile>
<id>spark-3.2</id>
<properties>
<!-- NOTE(review): Jackson 2.12.3 looks chosen to match the Jackson line
     bundled with Spark 3.2.x — confirm against Spark's dependency management. -->
<jackson.version>2.12.3</jackson.version>
<jackson.databind.version>2.12.3</jackson.databind.version>
<scala.version>2.12.15</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<spark.version>3.2.2</spark.version>
<!-- Selects the shuffle-manager module used for the Spark 3.x client build. -->
<rss.shuffle.manager>shuffle-manager-3</rss.shuffle.manager>
</properties>
<dependencies>
<!-- Pin both Jackson artifacts to the profile-specific versions declared above. -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.databind.version}</version>
</dependency>
</dependencies>
</profile>
<!-- Maven profile for building against Spark 3.3 (activated with -Pspark-3.3). -->
<profile>
<id>spark-3.3</id>
<properties>
<!-- NOTE(review): Jackson 2.13.3 looks chosen to match the Jackson line
     bundled with Spark 3.3.x — confirm against Spark's dependency management. -->
<jackson.version>2.13.3</jackson.version>
<jackson.databind.version>2.13.3</jackson.databind.version>
<scala.version>2.12.15</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<spark.version>3.3.0</spark.version>
<!-- Selects the shuffle-manager module used for the Spark 3.x client build. -->
<rss.shuffle.manager>shuffle-manager-3</rss.shuffle.manager>
</properties>
<dependencies>
<!-- Pin both Jackson artifacts to the profile-specific versions declared above. -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.databind.version}</version>
</dependency>
</dependencies>
</profile>
<profile>
<id>log4j-1</id>
<dependencies>