[KYUUBI #1480] Fix bug in docker-image-tool & Provider image way to config SPARK instead of local copy
<!-- Thanks for sending a pull request! Here are some tips for you: 1. If this is your first time, please read our contributor guidelines: https://kyuubi.readthedocs.io/en/latest/community/contributions.html 2. If the PR is related to an issue in https://github.com/apache/incubator-kyuubi/issues, add '[KYUUBI #XXXX]' in your PR title, e.g., '[KYUUBI #XXXX] Your PR title ...'. 3. If the PR is unfinished, add '[WIP]' in your PR title, e.g., '[WIP][KYUUBI #XXXX] Your PR title ...'. --> ### _Why are the changes needed?_ <!-- Please clarify why the changes are needed. For instance, 1. If you add a feature, you can talk about the use case of it. 2. If you fix a bug, you can clarify why it is a bug. --> 1. fix when using `-t` may cause build fail, because tag invalid reference format 2. add new option `-S`; Declare SPARK_HOME in Docker Image. When you configured -S, you need to provide an image with Spark as BASE_IMAGE. ### _How was this patch tested?_ - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible - [ ] Add screenshots for manual tests if appropriate - [x] [Run test](https://kyuubi.readthedocs.io/en/latest/develop_tools/testing.html#running-tests) locally before make a pull request Closes #1495 from zwangsheng/KYUUBI/1480. Closes #1480 0b43985a [zwangsheng] replace -d & add usage 7bc5d946 [zwangsheng] use SPARK_HOME_IN_DOCKER 7b68a8ba [zwangsheng] remove spark_binary 765dc284 [zwangsheng] rename 387590d8 [zwangsheng] use as build 8f2420c2 [zwangsheng] Merge branch 'KYUUBI/1480' of https://github.com/zwangsheng/incubator-kyuubi into KYUUBI/1480 e3407060 [zwangsheng] replace 1486b1ee [zwangsheng] replace a775006c [zwangsheng] add comments f98a7c23 [zwangsheng] 1480 Authored-by: zwangsheng <2213335496@qq.com> Signed-off-by: ulysses-you <ulyssesyou@apache.org>
This commit is contained in:
parent
7beee52448
commit
520bd2653c
@ -49,8 +49,10 @@ function image_ref {
|
||||
fi
|
||||
if [ -n "$TAG" ]; then
|
||||
image="$image:$TAG"
|
||||
else
|
||||
image="$image:$KYUUBI_VERSION"
|
||||
fi
|
||||
echo "$image:$KYUUBI_VERSION"
|
||||
echo "$image"
|
||||
}
|
||||
|
||||
function docker_push {
|
||||
@ -113,16 +115,28 @@ function build {
|
||||
KYUUBI_ROOT="$CTX_DIR/base"
|
||||
fi
|
||||
|
||||
# Copy Spark for Kyuubi to use as the submit client
|
||||
# if the user set -s (spark-provider), use it
|
||||
# else use builtin spark
|
||||
local BUILD_ARGS=(${BUILD_PARAMS})
|
||||
|
||||
# mkdir spark-binary to cache spark
|
||||
# clean cache if spark-binary exists
|
||||
if [[ ! -d "$KYUUBI_ROOT/spark-binary" ]]; then
|
||||
mkdir "$KYUUBI_ROOT/spark-binary"
|
||||
else
|
||||
rm -rf "$KYUUBI_ROOT/spark-binary/*"
|
||||
fi
|
||||
if [[ ! -d "$SPARK_HOME" ]]; then
|
||||
error "Cannot found dir $SPARK_HOME, you must configure SPARK_HOME correct."
|
||||
|
||||
# If SPARK_HOME_IN_DOCKER is configured,
|
||||
# Kyuubi won't copy the local Spark into the Docker image.
|
||||
# Use SPARK_HOME_IN_DOCKER as SPARK_HOME in docker image.
|
||||
if [[ -n "${SPARK_HOME_IN_DOCKER}" ]]; then
|
||||
BUILD_ARGS+=(--build-arg spark_home_in_docker=$SPARK_HOME_IN_DOCKER)
|
||||
BUILD_ARGS+=(--build-arg spark_provided="spark_provided")
|
||||
else
|
||||
if [[ ! -d "$SPARK_HOME" ]]; then
|
||||
error "Cannot found dir $SPARK_HOME, you must configure SPARK_HOME correct."
|
||||
fi
|
||||
cp -r "$SPARK_HOME/" "$KYUUBI_ROOT/spark-binary/"
|
||||
fi
|
||||
cp -r "$SPARK_HOME/" "$KYUUBI_ROOT/spark-binary/"
|
||||
|
||||
# Verify that the Docker image content directory is present
|
||||
if [ ! -d "$KYUUBI_ROOT/docker" ]; then
|
||||
@ -137,19 +151,11 @@ function build {
|
||||
error "Cannot find Kyuubi JARs. This script assumes that Apache Kyuubi has first been built locally or this is a runnable distribution."
|
||||
fi
|
||||
|
||||
local BUILD_ARGS=(${BUILD_PARAMS})
|
||||
|
||||
# If a custom Kyuubi_UID was set add it to build arguments
|
||||
if [ -n "$KYUUBI_UID" ]; then
|
||||
BUILD_ARGS+=(--build-arg kyuubi_uid=$KYUUBI_UID)
|
||||
fi
|
||||
|
||||
local BINDING_BUILD_ARGS=(
|
||||
${BUILD_ARGS[@]}
|
||||
--build-arg
|
||||
base_img=$(image_ref kyuubi)
|
||||
)
|
||||
|
||||
local BASEDOCKERFILE=${BASEDOCKERFILE:-"docker/Dockerfile"}
|
||||
local ARCHS=${ARCHS:-"--platform linux/amd64,linux/arm64"}
|
||||
|
||||
@ -193,6 +199,8 @@ Options:
|
||||
be used separately for each build arg.
|
||||
-s Put the specified Spark into the Kyuubi image to be used as the internal SPARK_HOME
|
||||
of the container.
|
||||
-S Declare SPARK_HOME in Docker Image. When you configured -S, you need to provide an image
|
||||
with Spark as BASE_IMAGE.
|
||||
|
||||
Examples:
|
||||
|
||||
@ -213,6 +221,9 @@ Examples:
|
||||
- Build with Spark placed "/path/spark"
|
||||
$0 -s /path/spark build
|
||||
|
||||
- Build with Spark Image myrepo/spark:3.1.0
|
||||
$0 -S /opt/spark -b BASE_IMAGE=myrepo/spark:3.1.0 build
|
||||
|
||||
EOF
|
||||
}
|
||||
|
||||
@ -228,7 +239,8 @@ NOCACHEARG=
|
||||
BUILD_PARAMS=
|
||||
KYUUBI_UID=
|
||||
CROSS_BUILD="false"
|
||||
while getopts f:r:t:Xnb:u:s: option
|
||||
SPARK_HOME_IN_DOCKER=
|
||||
while getopts f:r:t:Xnb:u:s:S: option
|
||||
do
|
||||
case "${option}"
|
||||
in
|
||||
@ -240,6 +252,7 @@ do
|
||||
X) CROSS_BUILD=1;;
|
||||
u) KYUUBI_UID=${OPTARG};;
|
||||
s) SPARK_HOME=${OPTARG};;
|
||||
S) SPARK_HOME_IN_DOCKER=${OPTARG};;
|
||||
esac
|
||||
done
|
||||
|
||||
|
||||
@ -24,15 +24,25 @@
|
||||
# -t the target repo and tag name
|
||||
# more options can be found with -h
|
||||
|
||||
ARG BASE_IMAGE=8-jre-slim
|
||||
FROM openjdk:${BASE_IMAGE}
|
||||
ARG BASE_IMAGE=openjdk:8-jre-slim
|
||||
ARG spark_provided="spark_builtin"
|
||||
|
||||
FROM ${BASE_IMAGE} as builder_spark_provided
|
||||
ONBUILD ARG spark_home_in_docker
|
||||
ONBUILD ENV SPARK_HOME ${spark_home_in_docker}
|
||||
|
||||
FROM ${BASE_IMAGE} as builder_spark_builtin
|
||||
|
||||
ONBUILD ENV SPARK_HOME /opt/spark
|
||||
ONBUILD RUN mkdir -p ${SPARK_HOME}
|
||||
ONBUILD COPY spark-binary ${SPARK_HOME}
|
||||
|
||||
FROM builder_${spark_provided}
|
||||
|
||||
ARG kyuubi_uid=10009
|
||||
|
||||
USER root
|
||||
|
||||
ENV KYUUBI_HOME /opt/kyuubi
|
||||
ENV SPARK_HOME /opt/spark
|
||||
ENV KYUUBI_LOG_DIR ${KYUUBI_HOME}/logs
|
||||
ENV KYUUBI_PID_DIR ${KYUUBI_HOME}/pid
|
||||
ENV KYUUBI_WORK_DIR_ROOT ${KYUUBI_HOME}/work
|
||||
@ -42,12 +52,11 @@ RUN set -ex && \
|
||||
apt-get update && \
|
||||
apt install -y bash tini libc6 libpam-modules krb5-user libnss3 procps && \
|
||||
useradd -u ${kyuubi_uid} -g root kyuubi && \
|
||||
mkdir -p ${KYUUBI_HOME} ${KYUUBI_LOG_DIR} ${KYUUBI_PID_DIR} ${KYUUBI_WORK_DIR_ROOT} ${SPARK_HOME} && \
|
||||
mkdir -p ${KYUUBI_HOME} ${KYUUBI_LOG_DIR} ${KYUUBI_PID_DIR} ${KYUUBI_WORK_DIR_ROOT} && \
|
||||
chmod ug+rw -R ${KYUUBI_HOME} && \
|
||||
chmod a+rwx -R ${KYUUBI_WORK_DIR_ROOT} && \
|
||||
rm -rf /var/cache/apt/*
|
||||
|
||||
COPY spark-binary ${SPARK_HOME}
|
||||
COPY bin ${KYUUBI_HOME}/bin
|
||||
COPY jars ${KYUUBI_HOME}/jars
|
||||
COPY externals/engines/spark ${KYUUBI_HOME}/externals/engines/spark
|
||||
|
||||
Loading…
Reference in New Issue
Block a user