<!-- Thanks for sending a pull request! Here are some tips for you: 1. If this is your first time, please read our contributor guidelines: https://kyuubi.readthedocs.io/en/latest/community/contributions.html 2. If the PR is related to an issue in https://github.com/NetEase/kyuubi/issues, add '[KYUUBI #XXXX]' in your PR title, e.g., '[KYUUBI #XXXX] Your PR title ...'. 3. If the PR is unfinished, add '[WIP]' in your PR title, e.g., '[WIP][KYUUBI #XXXX] Your PR title ...'. --> ### _Why are the changes needed?_ <!-- Please clarify why the changes are needed. For instance, 1. If you add a feature, you can talk about the use case of it. 2. If you fix a bug, you can clarify why it is a bug. --> ### _How was this patch tested?_ - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible - [ ] Add screenshots for manual tests if appropriate - [x] [Run test](https://kyuubi.readthedocs.io/en/latest/tools/testing.html#running-tests) locally before making a pull request Finally, Delta 1.0.0 is out, with Spark 3.1.1 support. - In Spark 3.0, a column of CHAR type is not allowed in non-Hive-Serde tables, and CREATE/ALTER TABLE commands will fail if CHAR type is detected. Please use STRING type instead. In Spark version 2.4 and below, CHAR type is treated as STRING type and the length parameter is simply ignored. - Since Spark 3.1, CHAR/CHARACTER and VARCHAR types are supported in the table schema. Table scan/insertion will respect the char/varchar semantics. If char/varchar is used in places other than the table schema, an exception will be thrown (CAST is an exception that simply treats char/varchar as string, like before). To restore the behavior before Spark 3.1, which treats them as STRING types and ignores a length parameter, e.g. `CHAR(4)`, you can set `spark.sql.legacy.charVarcharAsString` to `true`. Closes #616 from pan3793/tpcds.
Closes #616 ed934178 [Cheng Pan] column order d772b43c [Cheng Pan] Fix TPCDS 71b70be7 [Cheng Pan] ci 9518b601 [Cheng Pan] [TEST] [TPCDS] [DELTA] Migrate TPCDS test to Delta Authored-by: Cheng Pan <379377944@qq.com> Signed-off-by: Cheng Pan <379377944@qq.com>
128 lines
4.2 KiB
YAML
name: Kyuubi
|
|
|
|
on:
|
|
push:
|
|
branches:
|
|
- master
|
|
- branch-*
|
|
pull_request:
|
|
branches:
|
|
- master
|
|
- branch-*
|
|
|
|
jobs:
|
|
rat:
|
|
name: Check License
|
|
runs-on: ubuntu-latest
|
|
steps:
|
|
- uses: actions/checkout@v2
|
|
- uses: actions/setup-java@v1
|
|
with:
|
|
java-version: '1.8'
|
|
- run: build/mvn org.apache.rat:apache-rat-plugin:check
|
|
- name: Upload rat report
|
|
if: failure()
|
|
uses: actions/upload-artifact@v2
|
|
with:
|
|
name: rat
|
|
path: "**/target/rat*.txt"
|
|
|
|
build:
|
|
name: Build
|
|
runs-on: ubuntu-latest
|
|
strategy:
|
|
matrix:
|
|
profiles:
|
|
- ''
|
|
- '-Pspark-3.0 -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.1 -Dspark.archive.name=spark-3.1.1-bin-hadoop2.7.tgz -Dmaven.plugin.scalatest.exclude.tags=org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest'
|
|
- '-Pspark-3.1 -Pkyuubi-extension-spark_3.1'
|
|
- '-Pspark-master -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper'
|
|
env:
|
|
SPARK_LOCAL_IP: localhost
|
|
steps:
|
|
- uses: actions/checkout@v2
|
|
- name: Setup JDK 1.8
|
|
uses: actions/setup-java@v1
|
|
with:
|
|
java-version: '1.8'
|
|
- uses: actions/cache@v1
|
|
with:
|
|
path: ~/.m2/repository/com
|
|
key: ${{ runner.os }}-maven-com-${{ hashFiles('**/pom.xml') }}
|
|
restore-keys: |
|
|
${{ runner.os }}-maven-com-
|
|
- uses: actions/cache@v1
|
|
with:
|
|
path: ~/.m2/repository/org
|
|
key: ${{ runner.os }}-maven-org-${{ hashFiles('**/pom.xml') }}
|
|
restore-keys: |
|
|
${{ runner.os }}-maven-org-
|
|
- uses: actions/cache@v1
|
|
with:
|
|
path: ~/.m2/repository/net
|
|
key: ${{ runner.os }}-maven-net-${{ hashFiles('**/pom.xml') }}
|
|
restore-keys: |
|
|
${{ runner.os }}-maven-net-
|
|
- uses: actions/cache@v1
|
|
with:
|
|
path: ~/.m2/repository/io
|
|
key: ${{ runner.os }}-maven-io-${{ hashFiles('**/pom.xml') }}
|
|
restore-keys: |
|
|
${{ runner.os }}-maven-io-
|
|
- name: Build with Maven
|
|
run: mvn clean install --no-transfer-progress ${{ matrix.profiles }} -Dmaven.javadoc.skip=true -V
|
|
- name: Code coverage
|
|
if: ${{ matrix.profiles == '' }}
|
|
run: bash <(curl -s https://codecov.io/bash)
|
|
- name: Detected Dependency List Change
|
|
if: ${{ ! contains(matrix.profiles, 'spark-master') }}
|
|
run: build/dependency.sh
|
|
- name: Upload test logs
|
|
if: failure()
|
|
uses: actions/upload-artifact@v2
|
|
with:
|
|
name: unit-tests-log
|
|
path: |
|
|
**/target/unit-tests.log
|
|
**/kyuubi-spark-sql-engine.log*
|
|
|
|
tpcds:
|
|
name: TPC-DS Tests
|
|
runs-on: ubuntu-latest
|
|
env:
|
|
SPARK_LOCAL_IP: localhost
|
|
steps:
|
|
- uses: actions/checkout@v2
|
|
- name: Setup JDK 1.8
|
|
uses: actions/setup-java@v1
|
|
with:
|
|
java-version: '1.8'
|
|
- uses: actions/cache@v1
|
|
with:
|
|
path: ~/.m2/repository/com
|
|
key: ${{ runner.os }}-maven-com-${{ hashFiles('**/pom.xml') }}
|
|
restore-keys: |
|
|
${{ runner.os }}-maven-com-
|
|
- uses: actions/cache@v1
|
|
with:
|
|
path: ~/.m2/repository/org
|
|
key: ${{ runner.os }}-maven-org-${{ hashFiles('**/pom.xml') }}
|
|
restore-keys: |
|
|
${{ runner.os }}-maven-org-
|
|
- uses: actions/cache@v1
|
|
with:
|
|
path: ~/.m2/repository/net
|
|
key: ${{ runner.os }}-maven-net-${{ hashFiles('**/pom.xml') }}
|
|
restore-keys: |
|
|
${{ runner.os }}-maven-net-
|
|
- uses: actions/cache@v1
|
|
with:
|
|
path: ~/.m2/repository/io
|
|
key: ${{ runner.os }}-maven-io-${{ hashFiles('**/pom.xml') }}
|
|
restore-keys: |
|
|
${{ runner.os }}-maven-io-
|
|
- name: Run TPC-DS Tests
|
|
run: |
|
|
mvn clean install --no-transfer-progress -Pspark-3.1 -DskipTests -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper
|
|
mvn test --no-transfer-progress -Pspark-3.1 -Dtest=none -DwildcardSuites=org.apache.kyuubi.operation.tpcds -Dmaven.plugin.scalatest.exclude.tags=''
|