spark_main.sh
#! /bin/bash
# spark_main.sh
# Loops over the list of deployed Spark streaming applications in
# spark_app_name.txt and runs spark_monitor.sh for each one.
# To monitor a newly deployed streaming app, simply append its name
# to spark_app_name.txt.

app_list=/home/mes/report/script/spark/spark_crontab/spark_app_name.txt

# IFS= and -r keep each line intact (no whitespace trimming, no backslash
# interpretation); the `|| [ -n "$appName" ]` guard also processes a final
# line that lacks a trailing newline.
while IFS= read -r appName || [ -n "$appName" ]; do
  sh /home/mes/report/script/spark/spark_crontab/spark_monitor.sh "$appName"
  echo "$appName"
done < "$app_list"
spark_app_name.txt
Pmain111
Pmain123
spark_monitor.sh
#! /bin/bash
# spark_monitor.sh — YARN watchdog: if the named application is no longer
# listed by YARN, log the stop event and resubmit it via its submit script.
#
# Usage: spark_monitor.sh <appName>

appName=$1
logFile=/home/mes/report/script/spark/spark_log/sparklog.txt

# Count listed applications whose name (field 2 of `yarn application -list`)
# matches. grep -c replaces the grep|wc -l antipattern; -x requires an exact
# whole-line match so "Pmain1" no longer matches "Pmain11"; -- protects a
# name that starts with a dash.
myapp_status=$(yarn application -list | awk '{print $2}' | grep -cx -- "$appName")

# Numeric -eq with a quoted operand: the original unquoted `=` string test
# errored out (and skipped the restart) whenever the pipeline produced an
# empty value.
if [ "$myapp_status" -eq 0 ]; then
  echo "$appName is stop" >> "$logFile"
  strDate=$(date +%Y-%m-%d:%H:%M:%S)
  strStart="start $appName $strDate"
  echo "$strStart" >> "$logFile"
  #echo "$strDate $appName fail" | mail -s 'sparkstreaming failure alert' yang@360.com.cn
  # ${appName##*.} strips everything up to the last '.'; for dot-free names
  # it is the name itself — presumably supports "pkg.App" style entries
  # whose submit script is named after the final component. TODO confirm.
  nohup sh "/home/mes/report/script/spark/spark_submit/${appName##*.}.sh" >/dev/null 2>&1 &
  echo "$appName  restart " >> "$logFile"
else
  strDate=$(date +%Y%m%d%H%M%S)
  strRun="$appName running $strDate"
  #echo "$strRun" >> "$logFile"
fi