1.hive查看表结构
-- Show full table metadata: columns, storage location, SerDe, table properties.
desc formatted table_name;
-- Show only the column names and types.
desc table_name;
2.hive建立表
-- One row per forum comment, enriched with the car brand/model it refers to.
-- IF NOT EXISTS makes the DDL idempotent so the script can be re-run safely
-- (consistent with the DROP TABLE IF EXISTS used elsewhere in this file).
-- Field delimiter '\u0001' is Ctrl-A, Hive's default field separator.
CREATE TABLE IF NOT EXISTS `reviews_comments_qian` (
    `comm_id`      bigint,
    `post_id`      bigint,
    `user_id`      string,
    `comm_time`    date,
    `comm_content` string,
    `auto_brand`   string,
    `auto_name`    string)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\u0001'
LINES TERMINATED BY '\n';
3.hive操作表(Join on)
-- Populate reviews_comments_qian: attach the brand/model recorded on each
-- post's forum address to every comment on that post.
-- DISTINCT guards against duplicate rows multiplied by the joins.
INSERT INTO reviews_comments_qian
SELECT DISTINCT
    rc.comm_id,
    rc.post_id,
    rc.user_id,
    rc.comm_time,
    rc.comm_content,
    brand.auto_brand,
    brand.auto_name
FROM reviews_comments rc
INNER JOIN (
    -- Map each post_id to its brand/model via the post's forum address.
    SELECT
        fa.auto_brand,
        fa.auto_name,
        fp.post_id
    FROM forum_addresses_s fa
    INNER JOIN forum_posts_s fp
        ON fa.addr_id = fp.addr_id
) brand
    ON brand.post_id = rc.post_id;
4.hive删除表
-- Drop the table; IF EXISTS avoids an error when it is already absent.
-- NOTE(review): this names `reviews_comments_qianyang`, not the
-- `reviews_comments_qian` table created above — confirm the name is intended.
DROP TABLE IF EXISTS reviews_comments_qianyang;
5.hive更改列名
-- Rename column `buying goal` (space in the name, hence the backticks) to
-- snake_case buying_goal; Hive requires the column type to be restated.
-- Fix: added the terminating semicolon missing from the original — every
-- other statement in this file is terminated, and hive -f requires it.
ALTER TABLE reviews_comments_qianyang CHANGE `buying goal` buying_goal STRING;
6.sqoop远程查看数据库
# List the databases on a remote MySQL server through JDBC.
# Fix: long options take a double dash (--connect, --username, --password),
# matching the sqoop export/import commands below and the Sqoop User Guide;
# the single-dash forms in the original are not the documented syntax.
# NOTE(review): prefer -P (interactive prompt) or --password-file over a
# plain-text password on the command line.
sqoop list-databases --connect jdbc:mysql://mysql服务器ip:3306/ --username mysql用户名 --password mysql密码
7.hive向数据库插数据
# Export a Hive table's HDFS warehouse files into an existing MySQL table.
# --export-dir points at the Hive warehouse directory for the table;
# --input-fields-terminated-by '\001' (Ctrl-A) must match the field
# delimiter declared in the Hive table's DDL.
# NOTE(review): --password on the command line leaks the credential via
# shell history and the process list; prefer -P or --password-file.
sqoop export --connect jdbc:mysql://mysql服务器ip:3306/mysql数据库名 --username mysql用户名 --password mysql密码 --table autohome_qirui --export-dir /user/hive/warehouse/autohome_qirui --input-fields-terminated-by '\001'
8.mysql向hive导入数据
# Import a MySQL table into Hive: --create-hive-table creates the target,
# --hive-overwrite replaces existing data, --delete-target-dir removes any
# stale HDFS staging directory left by a previous run.
# NOTE(review): host, user, and password are hard-coded in plain text —
# move the credential to --password-file or -P before sharing this script.
sqoop import --connect jdbc:mysql://114.213.234.104:3306/qianyang --username root --password 112233 --table experiment_user_friends --fields-terminated-by "\t" --lines-terminated-by "\n" --hive-import --hive-overwrite --create-hive-table --hive-table experiment_user_friends_qian --delete-target-dir
9.hive将数据导入到linux本地
# Option 1: run the query and append its output to a local file.
# NOTE(review): '>>' appends on every run; use '>' if a fresh file is wanted.
hive -e "select * from experiment_user_friendcommentscombine_qian" >> local/wyp.txt
# Option 2: merge the table's raw HDFS warehouse files into one local file
# (faster — no query execution, but output keeps the table's raw delimiters).
hadoop fs -getmerge /user/hive/warehouse/experiment_user_friendcommentscombine_qian /root/qianyang/experiment_user_friendcommentscombine_qian.txt