1. Environment
| Component | Version | Notes |
| --- | --- | --- |
| Apache Spark | 3.3.0 | |
| Apache Hadoop | 3.3.2 | |
| Apache Iceberg | 0.14.0 | |
| Volcano | 1.6.0 | |
| Java | 1.8.111 | required for building Spark from source |
| Apache Maven | 3.8.6 | |
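
Note that JDK 8 is needed only for compiling Spark from source on the build host; the runtime image built below is based on `openjdk:11-jre-slim`, which Spark 3.3 also supports.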
2. Building the Docker Image
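The image is assembled in four steps: build a Volcano-enabled Spark distribution from source, add the Iceberg and AWS jars to its `jars` directory, write a Dockerfile, and build and push the image to Harbor. First, set the variables used throughout; the base directory, user home, and Harbor registry address are specific to this environment: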
```bash
BASE_DIR=/opt/soft/lakehouse/spark
USER_HOME=/home/luke
HARBOR_PATH=core.harbor.apps.g66666.hk.my-demo.tech/lakehouse
SPARK_VERSION=3.3.0
SPARK_BINARY_VERSION=3.3
SCALA_BINARY_VERSION=2.12
HADOOP_VERSION=3.3.2
ICEBERG_VERSION=0.14.0
AWS_SDK_BUNDLE_VERSION=2.17.131
AWS_JAVA_SDK_BUNDLE_VERSION=1.11.1026
```
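
Before kicking off the build, it is worth confirming the toolchain matches the table above. A minimal check, assuming `java`, `mvn`, and `docker` are already on the `PATH`:

```bash
# Sanity-check the build toolchain against the environment table.
java -version     # expect 1.8.x: the Spark source build here relies on JDK 8
mvn -version      # expect 3.8.x
docker --version  # needed later for the image build and push
```

With the toolchain confirmed, download the Spark source release and build a distribution: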
```bash
cd ${BASE_DIR}
wget https://dlcdn.apache.org/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}.tgz
tar -zxf spark-${SPARK_VERSION}.tgz
cd spark-${SPARK_VERSION}
./dev/make-distribution.sh --name spark-volcano-${SPARK_VERSION} --pip --tgz -Dhadoop.version=${HADOOP_VERSION} -Phive -Phive-thriftserver -Pkubernetes -Pvolcano
```

The build produces the distribution tarball at `${BASE_DIR}/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-spark-volcano-${SPARK_VERSION}.tgz`.
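The profile flags matter here: `-Pkubernetes` builds the Kubernetes resource manager module, `-Pvolcano` compiles in the Volcano scheduler integration introduced in Spark 3.3.0, and `-Phive`/`-Phive-thriftserver` bundle Hive support. `-Dhadoop.version` pins the Hadoop client libraries to 3.3.2 so they match the `hadoop-aws` jar added below. Next, unpack the distribution and move into its `jars` directory: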
```bash
cd ${BASE_DIR}
tar -zxf ${BASE_DIR}/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-spark-volcano-${SPARK_VERSION}.tgz \
-C ${BASE_DIR}/
cd spark-${SPARK_VERSION}-bin-spark-volcano-${SPARK_VERSION}/jars
```
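
Before adding anything, a quick optional check confirms the `-Pvolcano` profile took effect: the Volcano feature step is compiled into the `spark-kubernetes` module jar. This assumes `unzip` is available on the build host:

```bash
# Optional: list the spark-kubernetes jar's classes and look for the Volcano
# feature step compiled in by the -Pvolcano profile.
unzip -l spark-kubernetes_${SCALA_BINARY_VERSION}-${SPARK_VERSION}.jar | grep -i volcano
```

Now fetch the AWS SDK v2 jars through Maven and copy them in, along with the Iceberg Spark runtime and Hadoop S3A jars: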
```bash
mvn dependency:get -DremoteRepositories=https://repo1.maven.org/maven2/ -DgroupId=software.amazon.awssdk -DartifactId=bundle -Dversion=${AWS_SDK_BUNDLE_VERSION}
mvn dependency:get -DremoteRepositories=https://repo1.maven.org/maven2/ -DgroupId=software.amazon.awssdk -DartifactId=url-connection-client -Dversion=${AWS_SDK_BUNDLE_VERSION}
cp ${USER_HOME}/.m2/repository/software/amazon/awssdk/bundle/${AWS_SDK_BUNDLE_VERSION}/bundle-${AWS_SDK_BUNDLE_VERSION}.jar .
cp ${USER_HOME}/.m2/repository/software/amazon/awssdk/url-connection-client/${AWS_SDK_BUNDLE_VERSION}/url-connection-client-${AWS_SDK_BUNDLE_VERSION}.jar .
wget https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-spark-runtime-${SPARK_BINARY_VERSION}_${SCALA_BINARY_VERSION}/${ICEBERG_VERSION}/iceberg-spark-runtime-${SPARK_BINARY_VERSION}_${SCALA_BINARY_VERSION}-${ICEBERG_VERSION}.jar
wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/${HADOOP_VERSION}/hadoop-aws-${HADOOP_VERSION}.jar
wget https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/${AWS_JAVA_SDK_BUNDLE_VERSION}/aws-java-sdk-bundle-${AWS_JAVA_SDK_BUNDLE_VERSION}.jar
```
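
These jars are what let Spark read and write Iceberg tables on S3-compatible storage. As a sketch of what they enable (the catalog name, bucket, endpoint, and credentials below are placeholders, not part of this build):

```bash
# Illustrative only: an Iceberg Hadoop catalog on an s3a warehouse, using the
# Iceberg runtime and AWS jars added above. Every value below is a placeholder.
spark-sql \
  --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \
  --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \
  --conf spark.sql.catalog.demo.type=hadoop \
  --conf spark.sql.catalog.demo.warehouse=s3a://my-bucket/warehouse \
  --conf spark.hadoop.fs.s3a.endpoint=http://s3.example.local:9000 \
  --conf spark.hadoop.fs.s3a.access.key=my-access-key \
  --conf spark.hadoop.fs.s3a.secret.key=my-secret-key \
  --conf spark.hadoop.fs.s3a.path.style.access=true
```

With the jars in place, write the Dockerfile at the root of the distribution: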
```bash
cd ${BASE_DIR}/spark-${SPARK_VERSION}-bin-spark-volcano-${SPARK_VERSION}
cat << 'EOF' | tee Dockerfile
ARG java_image_tag=11-jre-slim
FROM openjdk:${java_image_tag}
ARG spark_uid=185
# Before building the docker image, first build and make a Spark distribution following
# the instructions in https://spark.apache.org/docs/latest/building-spark.html.
# If this docker file is being used in the context of building your images from a Spark
# distribution, the docker build command should be invoked from the top level directory
# of the Spark distribution. E.g.:
# docker build -t spark:latest -f kubernetes/dockerfiles/spark/Dockerfile .
RUN set -ex && \
sed -i 's/http:\/\/deb.\(.*\)/https:\/\/deb.\1/g' /etc/apt/sources.list && \
apt-get update && \
ln -s /lib /lib64 && \
apt install -y bash tini libc6 libpam-modules krb5-user libnss3 procps && \
mkdir -p /opt/spark && \
mkdir -p /opt/spark/examples && \
mkdir -p /opt/spark/work-dir && \
touch /opt/spark/RELEASE && \
rm /bin/sh && \
ln -sv /bin/bash /bin/sh && \
echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
rm -rf /var/cache/apt/*
COPY jars /opt/spark/jars
COPY bin /opt/spark/bin
COPY sbin /opt/spark/sbin
COPY kubernetes/dockerfiles/spark/entrypoint.sh /opt/
COPY kubernetes/dockerfiles/spark/decom.sh /opt/
COPY examples /opt/spark/examples
COPY kubernetes/tests /opt/spark/tests
COPY conf /opt/spark/conf
COPY data /opt/spark/data
ENV SPARK_HOME /opt/spark
WORKDIR /opt/spark/work-dir
RUN chmod g+w /opt/spark/work-dir
RUN chmod a+x /opt/decom.sh
ENTRYPOINT [ "/opt/entrypoint.sh" ]
# Specify the User that the actual main process will run as
USER ${spark_uid}
EOF
```
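
This mirrors the stock `kubernetes/dockerfiles/spark/Dockerfile` that ships with the distribution; the `COPY jars` line is what picks up the Iceberg and AWS jars added above. The heredoc is quoted (`'EOF'`) so the `${java_image_tag}` and `${spark_uid}` build args reach the Dockerfile unexpanded. Finally, build the image and push it to Harbor: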
```bash
cd ${BASE_DIR}/spark-${SPARK_VERSION}-bin-spark-volcano-${SPARK_VERSION}
sudo docker build -t spark-volcano:${SPARK_VERSION} .
sudo docker tag spark-volcano:${SPARK_VERSION} ${HARBOR_PATH}/spark-volcano:${SPARK_VERSION}
sudo docker push ${HARBOR_PATH}/spark-volcano:${SPARK_VERSION}
```
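
With the image pushed to Harbor, a quick way to exercise it is to submit SparkPi with Volcano as the pod scheduler, using the configuration keys Spark 3.3 documents for Volcano support. The API server address, namespace, and service account below are placeholders for this sketch and need to match your cluster:

```bash
# Smoke test: run SparkPi from the freshly pushed image, letting Volcano
# schedule the driver and executor pods. K8S_MASTER, the namespace, and the
# serviceAccountName are placeholders, not values from this walkthrough.
K8S_MASTER=k8s://https://kubernetes.default.svc:443
./bin/spark-submit \
  --master ${K8S_MASTER} \
  --deploy-mode cluster \
  --name spark-pi-volcano \
  --class org.apache.spark.examples.SparkPi \
  --conf spark.executor.instances=2 \
  --conf spark.kubernetes.namespace=default \
  --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
  --conf spark.kubernetes.container.image=${HARBOR_PATH}/spark-volcano:${SPARK_VERSION} \
  --conf spark.kubernetes.scheduler.name=volcano \
  --conf spark.kubernetes.driver.pod.featureSteps=org.apache.spark.deploy.k8s.features.VolcanoFeatureStep \
  --conf spark.kubernetes.executor.pod.featureSteps=org.apache.spark.deploy.k8s.features.VolcanoFeatureStep \
  local:///opt/spark/examples/jars/spark-examples_${SCALA_BINARY_VERSION}-${SPARK_VERSION}.jar 1000
```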
![](https://i-blog.csdnimg.cn/blog_migrate/b5370a6df015e4128ae4efff2aaac577.jpeg)