sh 脚本遵循 POSIX 规范;bash 在 POSIX sh 的基础上扩展了更多语法
shell 语法:创建变量时等号左右不能有空格,否则会产生语法错误
' ' 单引号内包含的内容原样输出,不转义,不能使用变量
" " 可以字符串转义,可以使用变量
`` 反引号:执行其中包含的命令,并替换为该命令的输出(命令替换)
mongodb数据迁移脚本
#!/bin/bash
# Migrate temporary test collections into the official MongoDB collections.
# For each (type, code) pair this prints the mongoexport/mongoimport
# commands (dry-run style, same output as before) and removes the
# intermediate JSON dump if one exists.
echo "将临时测试的数据迁移到正式库中"

codes=( "100000" "340000" "520000" )
types=( "official" "unofficial" "comment" )

# "${arr[@]}" (quoted) iterates one element per item; [*] unquoted would
# re-split on whitespace.
for type in "${types[@]}"; do
  for code in "${codes[@]}"; do
    export_c="${type}_${code}_clean_test"
    echo "mongoexport -d DataIntell -c $export_c --type=json -o $export_c.json"
    # Code 100000 (nationwide) maps to the bare type name as the target
    # collection; other region codes get a "_<code>" suffix.
    if [[ "$code" == "100000" ]]; then
      code=''
    else
      code="_${code}"
    fi
    import_c="${type}${code}"
    echo "mongoimport -d DataIntell -c $import_c --type=json --file $export_c.json"
    # Plain file removal: -f suffices, -r (recursive) was unnecessary;
    # "--" guards against names starting with "-".
    rm -f -- "$export_c.json"
  done
done
sparkstreaming消费kafka
#!/bin/bash
# Launch the Kafka ETL cleaning consumer for a user-chosen data type and
# region code. Expects: this script next to the ETL jar, config files
# under conf/, and a logs/ directory for job output.
echo "请确保脚本存放在清洗jar包同级目录,并且配置文件在conf目录下,日志在logs目录下!"

jarName=dataintell-etl-1.0.0.jar
codes=("100000" "340000" "520000")
types=("official" "unofficial" "comment")

echo "选择清洗类型:"
for (( i = 0; i < ${#types[@]}; ++i )); do
  echo
  echo "${i}.${types[i]}"
done
read -p "请选择要清洗数据类型? " i
# Reject non-numeric or out-of-range menu choices instead of silently
# falling back to element 0 (empty input indexes arr[0] in bash arithmetic).
if ! [[ "$i" =~ ^[0-9]+$ ]] || (( i >= ${#types[@]} )); then
  echo "无效选择: $i" >&2
  exit 1
fi
type=${types[i]}

echo "选择清洗区域:"
for (( i = 0; i < ${#codes[@]}; ++i )); do
  echo
  echo "${i}.${codes[i]}"
done
# Prompt fixed: this question selects the region, not the type.
read -p "请选择要清洗数据区域? " i
if ! [[ "$i" =~ ^[0-9]+$ ]] || (( i >= ${#codes[@]} )); then
  echo "无效选择: $i" >&2
  exit 1
fi
code=${codes[i]}
# Historical suffix logic, intentionally disabled: config files are always
# named <type>_<code>.json, including 100000.
#if test ${code} == "100000"
#then code=""
#else
#    code='_'${code}
#fi

config="${type}_${code}"
# Start the consumer detached in a subshell; $(pwd) replaces legacy
# backticks, and all expansions are quoted against spaces in paths.
(nohup java -jar -Dloader.main=com.suncnpap.etl.kafka.consumer.Kafka10Consumer "$jarName" -p "$(pwd)/conf/${config}.json" > "logs/${config}.log" 2>&1 &)
# Ensure the log file exists so tail does not race the JVM startup.
touch "logs/${config}.log"
tail -f "logs/${config}.log"
echo "清洗${config}数据完成"