巡山中需要获取的数据任务有:1. fsimage信息,每日推送
2. 集群作业日志,每日推送
因同时获取两集群数据,因此我通过不同环境变量加载,保存数据到本地并推送至远端
1. fsimage.sh
#!/bin/bash
##############################
## Name: PickFsimageFromHdfs
## Desc: fetch the current fsimage from HDFS (modified by yrz)
## Args: date [20200527]
## Date: 20200608
##############################
# fsimage data directory
path=/mnt/xdata/xixian1/xx1_fsimage/xx1
curtime=$(date -d 'yesterday' +%Y%m%d)
echo "+++++++++++++++++++++$curtime fsimage save $path+++++++++++++++++++"
# load cluster-1 environment variables
source /opt/beh/conf/beh_env1
# archive yesterday's fsimage before fetching a fresh one
mv "$path"/fsimage* "/mnt/xdata/xixian1/xx1_bak/fsimage$curtime"
hdfs dfsadmin -fetchImage "$path"
echo "+++++++++++++++++++++ fsimage end+++++++++++++++++++"
done.ex
#!/usr/bin/expect -f
################
## Interactive scp helper.
## Args: argv0 = user@host, argv1 = password,
##       argv2 = source path, argv3 = destination path
###############
set timeout -1
set DIP [lindex $argv 0]
set PASS [lindex $argv 1]
set SRC_FILE [lindex $argv 2]
set DEST_FILE [lindex $argv 3]
spawn scp -r $SRC_FILE $DIP:$DEST_FILE
expect {
    ;# first connection to an unknown host asks to confirm the key;
    ;# without this branch the script would hang forever (timeout -1)
    "yes/no" { send "yes\r"; exp_continue }
    ;# send carriage return, not newline, to answer the password prompt
    "*assword:" { send "$PASS\r" }
}
expect eof
fsimage_scp.sh
#!/bin/bash
# Push the collected fsimage files to the remote analysis host via the
# interactive scp helper (done.ex answers the password prompt).
remote="hostname@ip"
password="XXXXXX"
src_dir="/mnt/xdata/xixian1/xx1_fsimage/xx1"
dst_dir="/data/hh_xs_f/ops/xunshan/data/fsimage-data/"
expect /mnt/xdata/xixian1/scpdir/done.ex "$remote" "$password" "$src_dir" "$dst_dir"
2. xx1get.sh
#!/bin/bash
##############################
## Name: PickDataFromHdfs
## Desc: pull job-history logs for one day from HDFS (modified by yrz)
## Args: day of month [e.g. 27 for 20200527]
## Date: 20200527
##############################
# load environment variables
source /opt/beh/conf/beh_env1
# external argument: day of month to collect
day_id=$1
echo "$day_id"
curtime=$(date +%Y%m%d%H%M%S)
echo "Get File List begin:$curtime"
DIR="/user/history/done/2020/06/${day_id}"
# save the newest three listed paths to a file
# (awk column 8 is the path; the "Found N items" header yields an empty field)
hadoop fs -ls "${DIR}" | awk '{print $8}' | tail -n 3 > /mnt/xdata/xixian1/fileList.txt
# tail -3 /mnt/xdata/xixian1/fileList.txt > /mnt/xdata/xixian1/file.txt
echo "the file successfully"
# local storage directory
LOCAL_DIR="/mnt/xdata/xixian1/xx1/2020/06"
# loop over the listed paths and download each one
# (redirect into the loop instead of `cat | while` so no subshell is spawned)
while read -r line
do
  # copyToLocal or get both work here
  hadoop fs -get "$line" "$LOCAL_DIR"
  echo "${line} is moved to ${LOCAL_DIR} successfully"
done < /mnt/xdata/xixian1/fileList.txt
# finish banner printed once, after all files are fetched
end=$(date +%Y%m%d%H%M%S)
echo "+++++++++++++the Job finishes , ${end}++++++++++++++++++++++++++"
echo "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
修改后:
#!/bin/bash
##############################
## Name: PickDataFromHdfs
## Desc: pull the newest day's job-history logs from HDFS (modified by yrz)
## Args: day [kept for compatibility; the directory is now auto-detected]
## Date: 20200527
##############################
# load environment variables
source /opt/beh/conf/beh_env1
# external argument (no longer used to build the path, but still logged)
day_id=$1
echo "$day_id"
curtime=$(date +%Y%m%d%H%M%S)
echo "Get File List begin:$curtime"
# newest day directory under the 2020 history tree
DIR=$(hadoop fs -ls /user/history/done/2020 | awk '{print $8}' | tail -n 1)
hadoop fs -ls "${DIR}" | awk '{print $8}' | tail -n 3 > /mnt/xdata/xixian1/fileList.txt
# tail -3 /mnt/xdata/xixian1/fileList.txt > /mnt/xdata/xixian1/file.txt
echo "the file successfully"
# local storage directory; derive the month from DIR (6th "/"-separated
# field) instead of issuing a second, identical hadoop fs -ls call
MONTH=$(echo "$DIR" | awk -F "/" '{print $6}')
LOCAL_DIR="/mnt/xdata/xixian1/xx1/2020/$MONTH"
if [ ! -d "$LOCAL_DIR" ]; then
  # -p so a missing parent directory does not abort the run
  mkdir -p "$LOCAL_DIR"
  chmod -R 755 "$LOCAL_DIR"
else
  echo "$LOCAL_DIR exist"
fi
# loop over the listed paths and download each one
# (redirect into the loop instead of `cat | while` so no subshell is spawned)
while read -r line
do
  # copyToLocal or get both work here
  hadoop fs -get "$line" "$LOCAL_DIR"
  echo "${line} is moved to ${LOCAL_DIR} successfully"
done < /mnt/xdata/xixian1/fileList.txt
# finish banner printed once, after all files are fetched
end=$(date +%Y%m%d%H%M%S)
echo "+++++++++++++the Job finishes , ${end}++++++++++++++++++++++++++"
echo "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
done.ex
#!/usr/bin/expect -f
## Interactive scp helper.
## Args: argv0 = user@host, argv1 = password,
##       argv2 = source path, argv3 = destination path
set timeout -1
set DIP [lindex $argv 0]
set PASS [lindex $argv 1]
set SRC_FILE [lindex $argv 2]
set DEST_FILE [lindex $argv 3]
spawn scp -r $SRC_FILE $DIP:$DEST_FILE
expect {
    ;# first connection to an unknown host asks to confirm the key;
    ;# without this branch the script would hang forever (timeout -1)
    "yes/no" { send "yes\r"; exp_continue }
    ;# send carriage return, not newline, to answer the password prompt
    "*assword:" { send "$PASS\r" }
}
expect eof
done.sh
#!/bin/bash
# scp transfer script: push the collected job logs to the remote host
# via the interactive expect helper (done.ex answers the password prompt).
remote="hostname@ip"
password="密码"
src_dir="/mnt/xdata/xixian1/xx1"
dst_dir="/data/hh_xs_f/ops/xunshan/data/job-data/"
expect /mnt/xdata/xixian1/scpdir/done.ex "$remote" "$password" "$src_dir" "$dst_dir"
xx1ql.sh
#!/bin/bash
# Clean the job-log save directory: keep only the newest three days of data.
DATA_DIR=/mnt/xdata/xixian1/xx1/2020/06
# ls -t sorts newest first; everything after the first three entries goes.
# Plain `ls -t` (not `ls -lt` long output) avoids fragile column parsing,
# and `--` plus ${VAR:?} guard the rm -rf against empty/option-like names.
ls -t "$DATA_DIR" | awk 'NR > 3' | while read -r name; do
  rm -rf -- "${DATA_DIR:?}/$name"
done
修改后:
#!/bin/bash
# Clean the job-log save directory: keep only the newest three days of data.
# The month is derived from the newest directory under the HDFS history tree
# (6th "/"-separated field of the path), matching what xx1get.sh downloads.
MONTH=$(hadoop fs -ls /user/history/done/2020 | awk '{print $8}' | tail -n 1 | awk -F "/" '{print $6}')
# Abort if MONTH came back empty — otherwise the rm -rf below would run
# against /mnt/xdata/xixian1/xx1/2020/ and delete every month's data.
[ -n "$MONTH" ] || { echo "could not determine month, aborting" >&2; exit 1; }
DATA_DIR="/mnt/xdata/xixian1/xx1/2020/$MONTH"
# ls -t sorts newest first; everything after the first three entries goes.
ls -t "$DATA_DIR" | awk 'NR > 3' | while read -r name; do
  rm -rf -- "${DATA_DIR:?}/$name"
done
3. 加入crontab 定时每日执行
# collect job logs from both clusters
10 20 * * * sh /mnt/xdata/xixian1/xx1get.sh >> /mnt/xdata/xixian1/xx1.log 2>&1
15 20 * * * sh /mnt/xdata/xixian2/xx2get.sh >> /mnt/xdata/xixian2/xx2.log 2>&1
# collect fsimage from both clusters
00 20 * * * sh /mnt/xdata/xixian1/fsimage.sh >> /mnt/xdata/xixian1/fsimage.log 2>&1
05 20 * * * sh /mnt/xdata/xixian2/fsimage.sh >> /mnt/xdata/xixian2/fsimage.log 2>&1
# push collected data to the remote host
00 02 * * * sh /mnt/xdata/xixian1/scpdir/done.sh > /mnt/xdata/xixian1/scpdir/done.logs 2>&1
00 02 * * * sh /mnt/xdata/xixian1/scpdir/fsimage.sh > /mnt/xdata/xixian1/scpdir/fsimage.logs 2>&1
05 02 * * * sh /mnt/xdata/xixian2/scpdir/done.sh > /mnt/xdata/xixian2/scpdir/done.logs 2>&1
05 02 * * * sh /mnt/xdata/xixian2/scpdir/fsimage.sh > /mnt/xdata/xixian2/scpdir/fsimage.logs 2>&1
# clean up done data (comment moved to its own line: it had been fused onto
# the previous entry's redirect, corrupting "2>&1")
50 01 * * * sh /mnt/xdata/xixian2/scpdir/xx2ql.sh
50 01 * * * sh /mnt/xdata/xixian1/scpdir/xx1ql.sh