1)下载Spark包
# Download and unpack the Spark 3.3.1 distribution (Hadoop 3 build).
wget https://dlcdn.apache.org/spark/spark-3.3.1/spark-3.3.1-bin-hadoop3.tgz
# Fix: archive name must match the file downloaded above (was "spark-3.310-...").
tar -xf spark-3.3.1-bin-hadoop3.tgz
# SPARK_HOME points at the extracted distribution.
# NOTE(review): path repeats "spark/spark" — confirm this matches the actual extract location.
export SPARK_HOME=/data/k8s/spark/spark/spark-3.3.1-bin-hadoop3
2)镜像打包
cd "$SPARK_HOME" || exit 1
# Build the Spark container image.
#   -r  image repository prefix (registry host:port/namespace)
#   -t  image tag
# The base image uses ./kubernetes/dockerfiles/spark/Dockerfile by default;
# -f overrides the base Dockerfile, -p selects the PySpark Dockerfile.
# Fix: the subcommand is a single "build" (original had it duplicated: "build build").
"$SPARK_HOME"/bin/docker-image-tool.sh -r 镜像仓库ip:端口/dmp -t 3.3.1-hadoop3 build
# Push the built image to the private registry.
"$SPARK_HOME"/bin/docker-image-tool.sh -r 镜像仓库ip:端口/dmp -t 3.3.1-hadoop3 push
3)配置 spark 用户权限
# Create a dedicated namespace and service account for Spark driver pods.
kubectl create ns spark
kubectl create serviceaccount spark -n spark
# Grant the service account the "edit" ClusterRole so the Spark driver can
# create/manage executor pods (per the Spark-on-Kubernetes RBAC docs).
# Fix: the original line was truncated ("...clusterrolebinding spark-ro");
# a clusterrolebinding requires --clusterrole and --serviceaccount to be usable.
kubectl create clusterrolebinding spark-role --clusterrole=edit --serviceaccount=spark:spark