## Submit to YARN in *cluster* deploy mode; the Python env zip is distributed from HDFS
# Launch the PySpark job on YARN (cluster mode). The zipped Python env is
# fetched from HDFS by each container and unpacked locally as ./my_env.
submit_opts=(
  --master yarn
  --deploy-mode cluster
  --driver-memory 1g
  --num-executors 3
  --executor-memory 1g
  --executor-cores 1
  # The '#my_env' suffix names the directory the archive is unpacked into
  # inside every YARN container.
  --archives hdfs://hadoop102:8020/user/xxx/python_zips/pyspark_env.zip#my_env
  # Point both driver and executors at the interpreter shipped in the archive
  # (zip top-level dir is pyspark_env/, hence the extra path segment).
  --conf spark.pyspark.driver.python=./my_env/pyspark_env/bin/python3.8
  --conf spark.pyspark.python=./my_env/pyspark_env/bin/python3.8
)
spark-submit "${submit_opts[@]}" /tmp/pycharm_project_455/helloworld.py
# --archives hdfs://hadoop102:8020/user/xxx/python_zips/pyspark_env.zip#my_env
#   (the '#my_env' suffix is mandatory: it is the name of the directory the
#   archive is unpacked into inside each YARN container)
# Build the Python-environment zip and upload it to the user's HDFS directory:
zip -r pyspark_env.zip pyspark_env/
# Resulting HDFS location: hdfs://hadoop102:8020/user/xxx/python_zips/pyspark_env.zip