kyuubi/.github/workflows/master.yml
Kent Yao 64118881aa
[KYUUBI #1720] Extract scalastyle workflow to a single yml and disable it in other jobs
<!--
Thanks for sending a pull request!

Here are some tips for you:
  1. If this is your first time, please read our contributor guidelines: https://kyuubi.readthedocs.io/en/latest/community/contributions.html
  2. If the PR is related to an issue in https://github.com/apache/incubator-kyuubi/issues, add '[KYUUBI #XXXX]' in your PR title, e.g., '[KYUUBI #XXXX] Your PR title ...'.
  3. If the PR is unfinished, add '[WIP]' in your PR title, e.g., '[WIP][KYUUBI #XXXX] Your PR title ...'.
-->

### _Why are the changes needed?_
<!--
Please clarify why the changes are needed. For instance,
  1. If you add a feature, you can talk about the use case of it.
  2. If you fix a bug, you can clarify why it is a bug.
-->

Fail fast on style issues and reduce GitHub Actions overhead.

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.readthedocs.io/en/latest/develop_tools/testing.html#running-tests) locally before making a pull request

```
=========dev/kyuubi-tpcds/target/scalastyle-output.xml=========
=========dev/kyuubi-extension-spark-3-1/target/scalastyle-output.xml=========
=========dev/kyuubi-extension-spark-common/target/scalastyle-output.xml=========
=========dev/kyuubi-extension-spark-3-2/target/scalastyle-output.xml=========
=========externals/kyuubi-download/target/scalastyle-output.xml=========
=========externals/kyuubi-trino-engine/target/scalastyle-output.xml=========
=========externals/kyuubi-spark-sql-engine/target/scalastyle-output.xml=========
=========externals/kyuubi-flink-sql-engine/target/scalastyle-output.xml=========
=========kubernetes/integration-tests/target/scalastyle-output.xml=========
=========kyuubi-assembly/target/scalastyle-output.xml=========
=========kyuubi-common/target/scalastyle-output.xml=========
  <error line="22" source="org.scalastyle.file.FileLineLengthChecker" severity="error" message="File line length exceeds 100 characters"/>
=========kyuubi-ctl/target/scalastyle-output.xml=========
=========kyuubi-ha/target/scalastyle-output.xml=========
=========kyuubi-metrics/target/scalastyle-output.xml=========
=========kyuubi-server/target/scalastyle-output.xml=========
=========kyuubi-zookeeper/target/scalastyle-output.xml=========
=========target/scalastyle-output.xml=========
=========tools/spark-block-cleaner/target/scalastyle-output.xml=========

```

Closes #1720 from yaooqinn/style.

Closes #1720

36ca3983 [Kent Yao] Extract scalastyle workflow to a single yml and disable it in other jobs
d83b8077 [Kent Yao] Extract scalastyle workflow to a single yml and disable it in other jobs
7366ca85 [Kent Yao] Extract scalastyle workflow to a single yml and disable it in other jobs
a0b44e26 [Kent Yao] Extract scalastyle workflow to a single yml and disable it in other jobs

Authored-by: Kent Yao <yao@apache.org>
Signed-off-by: Kent Yao <yao@apache.org>
2022-01-11 15:12:23 +08:00

190 lines
6.4 KiB
YAML

name: Kyuubi

# Run CI on pushes and pull requests targeting master and release branches.
on:
  push:
    branches:
      - master
      - branch-*
  pull_request:
    branches:
      - master
      - branch-*

jobs:
build:
name: Build
runs-on: ubuntu-20.04
strategy:
matrix:
include:
- java: 8
spark: '3.0'
spark-hadoop: '2.7'
- java: 8
spark: '3.1'
spark-hadoop: '2.7'
profiles: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.0.3 -Dspark.archive.name=spark-3.0.3-bin-hadoop2.7.tgz -Dmaven.plugin.scalatest.exclude.tags=org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.IcebergTest'
- java: 8
spark: '3.1'
spark-hadoop: '3.2'
profiles: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.2.0 -Dspark.archive.name=spark-3.2.0-bin-hadoop3.2.tgz -Dmaven.plugin.scalatest.exclude.tags=org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.IcebergTest'
- java: 8
spark: '3.1'
spark-hadoop: '3.2'
codecov: 'true'
- java: 8
spark: '3.2'
spark-hadoop: '3.2'
- java: 11
spark: '3.1'
spark-hadoop: '3.2'
profiles: '-DskipTests -Pflink-provided,spark-provided'
env:
SPARK_LOCAL_IP: localhost
steps:
- uses: actions/checkout@v2
- name: Setup JDK ${{ matrix.java }}
uses: actions/setup-java@v2
with:
distribution: zulu
java-version: ${{ matrix.java }}
- uses: actions/cache@v2
with:
path: ~/.m2/repository/com
key: ${{ runner.os }}-maven-com-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-com-
- uses: actions/cache@v2
with:
path: ~/.m2/repository/org
key: ${{ runner.os }}-maven-org-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-org-
- uses: actions/cache@v2
with:
path: ~/.m2/repository/net
key: ${{ runner.os }}-maven-net-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-net-
- uses: actions/cache@v2
with:
path: ~/.m2/repository/io
key: ${{ runner.os }}-maven-io-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-io-
- name: Build with Maven
run: >-
./build/mvn clean install -Dmaven.javadoc.skip=true -Drat.skip=true -Dscalastyle.skip=true -V
-Pspark-${{ matrix.spark }} -Pspark-hadoop-${{ matrix.spark-hadoop }} ${{ matrix.profiles }}
- name: Code coverage
if: ${{ matrix.codecov == 'true' }}
uses: codecov/codecov-action@v2
with:
verbose: true
- name: Detected Dependency List Change
if: ${{ matrix.java == 8 }}
run: build/dependency.sh
- name: Upload test logs
if: failure()
uses: actions/upload-artifact@v2
with:
name: unit-tests-log
path: |
**/target/unit-tests.log
**/kyuubi-flink-sql-engine.log*
**/kyuubi-spark-sql-engine.log*
**/target/scalastyle-output.xml
tpcds:
name: TPC-DS Tests
runs-on: ubuntu-20.04
env:
SPARK_LOCAL_IP: localhost
steps:
- uses: actions/checkout@v2
- name: Setup JDK 8
uses: actions/setup-java@v2
with:
distribution: zulu
java-version: 8
- uses: actions/cache@v2
with:
path: ~/.m2/repository/com
key: ${{ runner.os }}-maven-com-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-com-
- uses: actions/cache@v2
with:
path: ~/.m2/repository/org
key: ${{ runner.os }}-maven-org-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-org-
- uses: actions/cache@v2
with:
path: ~/.m2/repository/net
key: ${{ runner.os }}-maven-net-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-net-
- uses: actions/cache@v2
with:
path: ~/.m2/repository/io
key: ${{ runner.os }}-maven-io-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-io-
- name: Run TPC-DS Tests
run: >-
./build/mvn clean install -Dmaven.javadoc.skip=true -Drat.skip=true -Dscalastyle.skip=true -V
-pl kyuubi-server -am
-Pspark-3.1
-Dmaven.plugin.scalatest.exclude.tags=''
-Dtest=none -DwildcardSuites=org.apache.kyuubi.operation.tpcds
minikube-it:
name: Minikube Integration Test
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v2
# https://github.com/docker/build-push-action
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Build Kyuubi Docker Image
uses: docker/build-push-action@v2
with:
# passthrough CI into build container
build-args: CI=${CI}
context: .
file: build/Dockerfile
load: true
tags: apache/kyuubi:latest
# from https://github.com/marketplace/actions/setup-minikube-kubernetes-cluster
- name: Setup Minikube
uses: manusa/actions-setup-minikube@v2.4.2
with:
minikube version: 'v1.16.0'
kubernetes version: 'v1.19.2'
- name: kubectl pre-check
run: |
kubectl get serviceaccount
kubectl create serviceaccount kyuubi
kubectl get serviceaccount
- name: start kyuubi
run: kubectl apply -f kubernetes/integration-tests/test-k8s.yaml
- name: kyuubi pod check
run: kubectl get pods
- name: integration tests
run: >-
./build/mvn clean install -Dmaven.javadoc.skip=true -Drat.skip=true -Dscalastyle.skip=true -V
-pl kubernetes/integration-tests -am
-Pkubernetes
-Dtest=none -DwildcardSuites=org.apache.kyuubi.kubernetes.test
- name: Upload test logs
if: failure()
uses: actions/upload-artifact@v2
with:
name: unit-tests-log
path: |
**/target/unit-tests.log