1、脚本目录路径
[spark@Master data_dir]$ pwd
/home/spark/opt/data_dir
[spark@Master data_dir]$ ls -R /home/spark/opt/data_dir
/home/spark/opt/data_dir:
weixin_data.sh yemao_log_20151214.csv yemao_log_20151215.csv yesterday.list
2、Shell批处理脚本内容
[spark@Master data_dir]$ cat weixin_data.sh
#!/bin/bash
# Batch-export WeChat (MicroMessenger) access logs per day and load them into MySQL.
# The date list is fed line-by-line into the while-read loop below (yesterday.list).
# Alternative: prompt interactively for a single date instead of reading from stdin:
#echo -n "please enter a day for running :"
#read yesterday
while read yesterday
do
cd /home/spark/opt/data_dir/
/home/spark/opt/mongodb-2.7.0/bin/mongoexport -d yemao -c yemao_log_$yesterday --csv --query '{"browser":/MicroMessenger/}' -f id,time,url_from,url_current,url_to,token -o /home/spark/opt/data_dir/yemao_log_$yesterday.csv
/usr/local/mysql/bin/mysql -udatahouse -pDTHS2016 -h120.55.189.188 -P3306 --default-character-set=utf8 -e "use logdata; LOAD DATA