1. Create a Docker overlay network
docker network create --driver=overlay --attachable overlay
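Note: the overlay driver requires the Docker daemon to be part of a swarm. If the command above fails because the node is not a swarm member, initialize a single-node swarm first:
docker swarm init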
2. Start the Spark master and two workers
docker run --network overlay --name spark-master -h spark-master -e ENABLE_INIT_DAEMON=false -d bde2020/spark-master:2.4.1-hadoop2.7
docker run --network overlay --name spark-worker-1 -h spark-worker-1 -e ENABLE_INIT_DAEMON=false -d bde2020/spark-worker:2.4.1-hadoop2.7
docker run --network overlay --name spark-worker-2 -h spark-worker-2 -e ENABLE_INIT_DAEMON=false -d bde2020/spark-worker:2.4.1-hadoop2.7
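To confirm that both workers registered with the master, check the master's log; the exact wording varies by Spark version, but one registration line should appear per worker:
docker logs spark-master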
3. Inspect the spark-submit script
docker run -it --rm bde2020/spark-submit:2.4.1-hadoop2.7 cat submit.sh
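The printed script shows which environment variables the image wires into spark-submit, presumably SPARK_MASTER_URL, SPARK_APPLICATION_JAR_LOCATION, SPARK_APPLICATION_MAIN_CLASS, SPARK_APPLICATION_ARGS, and SPARK_SUBMIT_ARGS; step 4 below sets exactly these.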
4. Run a test job from spark-submit with an example JAR
docker run --network overlay -e ENABLE_INIT_DAEMON=false \
  -e SPARK_APPLICATION_JAR_LOCATION="/spark/examples/jars/spark-examples_2.11-2.4.1.jar" \
  -e SPARK_APPLICATION_MAIN_CLASS="org.apache.spark.examples.SparkPi" \
  -e SPARK_MASTER_URL="spark://spark-master:7077" \
  -e SPARK_SUBMIT_ARGS="--deploy-mode client" \
  -e SPARK_APPLICATION_ARGS="10" \
  -it --rm bde2020/spark-submit:2.4.1-hadoop2.7
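Note that SPARK_MASTER_URL must point at the master container's hostname (spark-master, as set with -h above), not the host machine. If everything is wired up, the driver runs in client mode inside the container and, for SparkPi, prints an approximation line (something like "Pi is roughly 3.14...") near the end of the output before the container exits.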