----------------------------------------------------------
导出 MySQL
----------------------------------------------------------
# Export records from an HDFS directory (here a Hive warehouse table dir)
# into the MySQL table `tb_emp`.
# NOTE(review): --password on the command line is visible in `ps` output and
# shell history; prefer -P (interactive prompt) or --password-file in real use.
# --input-fields-terminated-by describes how the *input* HDFS records are
# delimited; '\t' must match the Hive table's actual field delimiter
# (Hive's default is '\001', so this assumes the table was created with '\t').
# --num-mappers 2 runs the export with two parallel map tasks.
bin/sqoop export \
--connect jdbc:mysql://bigdata-hpsk01.huadian.com/bigdata \
--username root \
--password 123456 \
--table tb_emp \
--columns empno,ename,job,mgr,hiredate,sal,comm,deptno \
--export-dir /user/hive/warehouse/db_hive.db/tb_emp \
--num-mappers 2 \
--input-fields-terminated-by '\t'
----------------------------------------------------------
导入 hdfs
----------------------------------------------------------
# Import the MySQL table `tb_emp` into HDFS as comma-delimited text files,
# deleting the target directory first so the job is re-runnable.
# NOTE(review): --password on the command line leaks via `ps`/history; prefer
# -P or --password-file in production.
# FIX: the original used --input-fields-terminated-by, which only configures
# how Sqoop *parses* input records (an export/codegen concern). To control the
# delimiter of the files an import *writes*, the correct flag is
# --fields-terminated-by; otherwise the output falls back to the default.
bin/sqoop import \
--connect jdbc:mysql://bigdata-hpsk01.huadian.com/bigdata \
--username root \
--password 123456 \
--table tb_emp \
--columns empno,ename,job,mgr,hiredate,sal,comm,deptno \
--target-dir /user/huadian/sqoop/tb_emp_import \
--num-mappers 1 \
--fields-terminated-by ',' \
--delete-target-dir
----------------------------------------------------------
导入 hive
----------------------------------------------------------
# Import the MySQL table `tb_word` into the Hive table db_hive.ht_word,
# overwriting existing data so the job is safe to re-run.
# NOTE(review): --password on the command line leaks via `ps`/history; prefer
# -P or --password-file in production.
# FIX: dropped --create-hive-table. That flag makes the job FAIL whenever the
# target Hive table already exists, which contradicts --hive-overwrite (whose
# whole point is re-running the import). A hive-import still creates the table
# automatically when it is missing, so removing the flag loses nothing on the
# first run and fixes every subsequent run.
bin/sqoop import \
--connect jdbc:mysql://bigdata-hpsk01.huadian.com/bigdata \
--username root \
--password 123456 \
--table tb_word \
--columns word,count \
--hive-import \
--hive-database db_hive \
--hive-table ht_word \
--fields-terminated-by ',' \
--num-mappers 1 \
--hive-overwrite \
--delete-target-dir
#19/07/10 23:17:44 ERROR tool.ImportTool:
#Encountered IOException running import job: java.io.IOException:
#java.lang.ClassNotFoundException: org.apache.hadoop.hive.conf.HiveConf
#错误原因:数据已经从MYSQL读取到了,并且已经到了HDFS上,
# SQOOP:依赖Hadoop和Hive,hive相关Jar包没有加载到SQOOP的运行环境上
export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:/opt/cdh5.7.6/hive-1.1.0-cdh5.7.6/lib/*
#需要hive-site.xml放到Hadoop/etc/hadoop/下面
ln -s /opt/cdh5.7.6/hive-1.1.0-cdh5.7.6/conf/hive-site.xml /opt/cdh5.7.6/hadoop-2.6.0-cdh5.7.6/etc/hadoop
20150828
date -s 08/29/2015
hive 使用小计
最新推荐文章于 2022-06-05 15:25:19 发布