#!/usr/bin/env bash
# load.sh
# Detect which Tencent Cloud NameNode is currently in the "active" HA state
# by querying the NameNodeStatus JMX bean on each candidate host.
# Sets: active_namenode (host IP of the active NameNode).
active_namenode=''
namenodes='172.17.19.25 172.17.19.90'
for namenode in ${namenodes}
do
    # grep -q: quiet presence test; the JMX reply contains "active" only for
    # the currently active NameNode. Test the pipeline directly instead of $?.
    if curl -s "http://${namenode}:50070/jmx?qry=Hadoop:service=NameNode,name=NameNodeStatus" | grep -q 'active'; then
        active_namenode=${namenode}
        break   # first active host wins; no need to probe the rest
    fi
done
# Fail fast if neither node reported active — otherwise later hdfs:// URIs
# would be built with an empty host and fail confusingly.
if [ -z "${active_namenode}" ]; then
    echo "ERROR: no active namenode found among: ${namenodes}" >&2
    exit 1
fi
echo "active_namenode=${active_namenode}"
# Migrate every Hive table listed in ./tablename (one table name per line)
# from the active NameNode's warehouse to the target cluster via distcp.
# If a copy fails, re-probe the NameNodes (an HA failover may have occurred)
# and retry the copy once with the refreshed active NameNode.
while IFS= read -r tn || [ -n "${tn}" ]    # '|| -n' handles a missing final newline
do
    [ -z "${tn}" ] && continue   # skip blank lines
    echo "${tn}"
    if ! hadoop distcp -Dmapreduce.job.queuename=hive2 -i -update -append \
        "hdfs://${active_namenode}:8020/user/hive/warehouse/bdm.db/${tn}" \
        "hdfs://192.168.14.67:8020/user/hive/warehouse/bdm.db/${tn}"; then
        # Copy failed — the active NameNode may have changed; probe again.
        for namenode in ${namenodes}
        do
            if curl -s "http://${namenode}:50070/jmx?qry=Hadoop:service=NameNode,name=NameNodeStatus" | grep -q 'active'; then
                active_namenode=${namenode}
            fi
        done
        echo "active_namenode=${active_namenode}"
        # Retry once; -update -append makes the retry resume incrementally.
        hadoop distcp -Dmapreduce.job.queuename=hive2 -i -update -append \
            "hdfs://${active_namenode}:8020/user/hive/warehouse/bdm.db/${tn}" \
            "hdfs://192.168.14.67:8020/user/hive/warehouse/bdm.db/${tn}"
    fi
done < tablename
# [hadoop] Hive table data migration
# (Original article published 2024-01-04 09:13:55)