load data [local] inpath 'filepath' [overwrite] into table 表名 [partition(partition=value,…)] # general LOAD DATA syntax; 'local' reads from the client filesystem, 'overwrite' replaces existing data
1.导入Hive表
1.1加载本地(Linux系统)文件到Hive表
load data local inpath '/home/hadoop/tmp/data.txt' into table hive2; # load a file from the local (Linux) filesystem into Hive table hive2
1.2加载HDFS文件到Hive表
hdfs dfs -mkdir /user/data #create the target directory in HDFS (was "-mkidr", a typo)
hdfs dfs -put '/home/hadoop/tmp/data.txt' /user/data #upload the local file into HDFS
load data inpath '/user/data/data.txt' into table hive2; #load the HDFS file into the Hive table
select * from hive2; #inspect the loaded rows
1.3加载本地文件并覆盖Hive表之前的数据
load data local inpath '/tmp/data.txt' overwrite into table hive2; # overwrite replaces all existing data in hive2; NOTE(review): this loads a LOCAL file, although the section heading mentions HDFS — confirm intent
1.4创建Hive表并通过insert加载
create table hive3 like hive2; # create hive3 with the same schema as hive2 (no data copied)
insert into table hive3 select * from hive2; #this statement launches a MapReduce job
2.导出Hive表
2.1Hive表===》Linux本地
1.hive的shell窗口进行导出
insert overwrite local directory '/home/hadoop/tmp/out1' select * from hive1.hive1; # export with the default format; "directory" keyword was missing and is required by Hive syntax
insert overwrite local directory '/home/hadoop/tmp/out2' row format delimited fields terminated by '\t' collection items terminated by '\n' select * from hive1.hive1; # export with explicit delimiters; original line was missing both the "directory" keyword and the select clause
2.Linux命令行窗口进行导出
cd app/hive # go to the Hive installation directory
bin/hive -e "select * from hive1.hive1;" > /home/hadoop/tmp/out3 # run the query non-interactively and redirect its output to a local file
2.2Hive表===》HDFS
insert overwrite directory '/user/hive/tmp/out' row format delimited fields terminated by '\t' collection items terminated by '\n' select * from hive1.hive1; # export to an HDFS directory (no "local") with explicit delimiters