1. Package and download the data from the old environment
HADOOP_USER_NAME=hive hadoop fs -ls hdfs://nameservice2/user/hive/warehouse/gfsales.db/t_zyk_price_detail/isonline=online/timedim=2
hadoop fs -get hdfs://nameservice2/user/hive/warehouse/datashow.db/s_used_position_prov
tar -zcvf xxx.tar.gz /xxx   ---package and compress
tar -zcvf s_used_position_city1.tar.gz s_used_position_city1
tar -zxvf xxx.tar.gz   ---unpack and decompress
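If several tables need to move, step 1 can be scripted. A minimal sketch, assuming the source directories under datashow.db carry the same names as the target tables (adjust the list to whatever hadoop fs -ls actually shows):
for t in s_used_position_prov1 s_used_position_city1 s_used_position_town1; do
    HADOOP_USER_NAME=hive hadoop fs -get hdfs://nameservice2/user/hive/warehouse/datashow.db/$t
    tar -zcvf $t.tar.gz $t
done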
2. Transfer the files to the new environment
Copy the tar archives over via ftp or sftp.
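For example with scp, sftp's close cousin (the hostname is a placeholder; /root/wangxinjie/ is the directory the put commands below read from):
scp s_used_position_prov1.tar.gz root@new-env-host:/root/wangxinjie/
---on the new host, extract before loading into HDFS
cd /root/wangxinjie/ && tar -zxvf s_used_position_prov1.tar.gz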
3. Load the files into HDFS
Prerequisite: the corresponding empty tables must already exist in the new environment, as sketched below.
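A minimal DDL sketch of such an empty table, assuming it is partitioned by partitiondate (the data columns and the storage format are placeholders and must match the files being loaded):
create table if not exists datashow.s_used_position_prov1 (
    prov_name string,
    used_cnt bigint
)
partitioned by (partitiondate string)
row format delimited fields terminated by '\t'
stored as textfile;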
HADOOP_USER_NAME=hdfs hadoop fs -put /root/wangxinjie/s_used_position_prov1/ hdfs://nameservice1/user/hive/warehouse/datashow.db/
HADOOP_USER_NAME=hdfs hadoop fs -put /root/wangxinjie/s_used_position_city1/ hdfs://nameservice1/user/hive/warehouse/datashow.db/
HADOOP_USER_NAME=hdfs hadoop fs -put /root/wangxinjie/s_used_position_town1/ hdfs://nameservice1/user/hive/warehouse/datashow.db/
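Before going further, confirm the upload landed where Hive expects it:
HADOOP_USER_NAME=hdfs hadoop fs -ls hdfs://nameservice1/user/hive/warehouse/datashow.db/s_used_position_prov1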
---grant permissions on the uploaded directories
HADOOP_USER_NAME=hdfs hadoop fs -chmod -R 777 hdfs://nameservice1/user/hive/warehouse/datashow.db/s_used_position_prov1
HADOOP_USER_NAME=hdfs hadoop fs -chmod -R 777 hdfs://nameservice1/user/hive/warehouse/datashow.db/s_used_position_city1
HADOOP_USER_NAME=hdfs hadoop fs -chmod -R 777 hdfs://nameservice1/user/hive/warehouse/datashow.db/s_used_position_town1
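chmod 777 is the blunt instrument; if the cluster enforces ownership, handing the directories to the hive user is a tighter alternative (hive:hive is a common warehouse owner/group, but check what this cluster actually uses):
HADOOP_USER_NAME=hdfs hadoop fs -chown -R hive:hive hdfs://nameservice1/user/hive/warehouse/datashow.db/s_used_position_prov1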
4. Repair the tables in Hive
---enter hive in the new environment and repair the partition metadata
msck repair table datashow.s_used_position_prov1;
msck repair table datashow.s_used_position_city1;
msck repair table datashow.s_used_position_town1;
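show partitions confirms that the repair actually registered the partition directories:
show partitions datashow.s_used_position_prov1;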
---query and verify the data
select partitiondate,count(1) cnt from datashow.s_used_position_town1 group by partitiondate order by partitiondate;
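Running the same count against the old environment and comparing the per-partition totals closes the loop; any mismatch points at a partition that failed to transfer. (The source table name here is assumed; use whatever step 1 actually downloaded.)
select partitiondate,count(1) cnt from datashow.s_used_position_town group by partitiondate order by partitiondate;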