shell 脚本运行时增加 ack(二次确认)机制
在提交 Spark 任务(Spark 重建索引)的时候,担心写错索引名,所以增加了一个暂停确认机制,让执行者提交前再检查一遍索引名,可以参考下面的样例:
cat submit.sh
# Abort the whole script on the first failing command.
set -e

export JAVA_HOME=/test/jdk1.8.0_181                       # JDK used for submission
export SPARK_HOME=/test/spark-2.0.1-bin-2.6.0-cdh5.15.0/  # use Spark 2.x

echo "-------------------------------------------------------------------------------------------------"
echo "try to rebuild the talent index"
echo "index can be: test_user_v2_r0 , test_user_v2_r1"
echo "-------------------------------------------------------------------------------------------------"

# Target index is the first (mandatory) positional argument.
# ${1:-} keeps the reference safe if the script is later hardened with 'set -u'.
WRITE_INDEX=${1:-}
echo "indexer to rebuild : $WRITE_INDEX"
# Fix: quote the expansion — the unquoted original made 'test' see multiple
# words (a syntax error) whenever the argument contained whitespace.
if [ -z "$WRITE_INDEX" ]; then
  echo "you need set the index"
  exit 1
fi
# Safety ack: pause so the operator can double-check the index name before a
# destructive rebuild. Enter / y / yes continues; everything else aborts.
read -r -p "Are You Sure? [Y/n] " input
case "$input" in
  ""|[yY][eE][sS]|[yY])
    # Empty input keeps the original default-yes behaviour of the [Y/n] prompt.
    echo "Yes, will continue"
    ;;
  [nN][oO]|[nN])
    echo "No, will exit"
    exit 1
    ;;
  *)
    # Fix: the original had no default arm, so a typo ("maybe", "q", ...) fell
    # through and the rebuild continued as if confirmed. Fail safe instead.
    echo "unrecognized input, will exit"
    exit 1
    ;;
esac
echo " try submit task"
# Fix: exporting SPARK_HOME does not put spark-submit on PATH, so the bare
# 'spark-submit' in the original could run whatever version happened to be on
# PATH instead of the Spark 2.x install configured above. Invoke the binary
# from SPARK_HOME explicitly. The job is backgrounded via nohup; stdout and
# stderr both go to dev.temp.log. "$WRITE_INDEX" is quoted so the index name
# is always passed as exactly one argument.
nohup "$SPARK_HOME"/bin/spark-submit --deploy-mode client --master yarn --class com.test.search.test.task.RebuildStarter \
  --driver-memory 2G \
  --num-executors 80 \
  --queue algo \
  --conf spark.default.parallelism=80 \
  --executor-cores 4 \
  --executor-memory 16G \
  --conf "spark.yarn.appMasterEnv.JAVA_HOME=/test/jdk1.8.0_181" \
  --conf "spark.executorEnv.JAVA_HOME=/test/jdk1.8.0_181" \
  --conf "spark.yarn.executor.memoryOverhead=2048" \
  --conf application.submit.user=test@test.com \
  test-profile-dev-1.0-SNAPSHOT.jar \
  "$WRITE_INDEX" > dev.temp.log 2>&1 &
主要的精华就是下面这段确认代码,它可以脱离提交脚本单独直接测试:
# Standalone copy of the ack prompt so it can be tested on its own.
# Enter / y / yes continues; everything else aborts.
read -r -p "Are You Sure? [Y/n] " input
case "$input" in
  ""|[yY][eE][sS]|[yY])
    # Empty input keeps the original default-yes behaviour of the [Y/n] prompt.
    echo "Yes, will continue"
    ;;
  [nN][oO]|[nN])
    echo "No, will exit"
    exit 1
    ;;
  *)
    # Fix: without a default arm, unrecognized input silently continued as if
    # the operator had confirmed. Treat anything unexpected as "no".
    echo "unrecognized input, will exit"
    exit 1
    ;;
esac