1. Start the metastore and hiveserver2 services
[root@hadoop-01 bin]# hive --service metastore &
[1] 31092
[root@hadoop-01 bin]# hive --service hiveserver2 &
[root@hadoop-01 bin]# ps -ef|grep hive
root 31092 21892 11 21:57 pts/0 00:00:15 /usr/java/jdk1.7.0_67-cloudera/bin/java -Xmx256m -Djava.library.path=/hadoop/hadoop-2.7.2/lib -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/hadoop/hadoop-2.7.2/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/hadoop/hadoop-2.7.2 -Dhadoop.id.str=root -Dhadoop.root.logger=INFO,console -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Xmx512m -Dlog4j.configurationFile=hive-log4j2.properties -Dhadoop.security.logger=INFO,NullAppender org.apache.hadoop.util.RunJar /hadoop/hive-remote-server/lib/hive-service-2.0.0.jar org.apache.hadoop.hive.metastore.HiveMetaStore
root 31206 21892 15 21:57 pts/0 00:00:21 /usr/java/jdk1.7.0_67-cloudera/bin/java -Xmx256m -Djava.library.path=/hadoop/hadoop-2.7.2/lib -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/hadoop/hadoop-2.7.2/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/hadoop/hadoop-2.7.2 -Dhadoop.id.str=root -Dhadoop.root.logger=INFO,console -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Xmx512m -Dlog4j.configurationFile=hive-log4j2.properties -Dhadoop.security.logger=INFO,NullAppender org.apache.hadoop.util.RunJar /hadoop/hive-remote-server/lib/hive-service-2.0.0.jar org.apache.hive.service.server.HiveServer2
[root@hadoop-01 bin]# netstat -nlp |grep 31206
tcp 0 0 0.0.0.0:10000 0.0.0.0:* LISTEN 31206/java
tcp 0 0 0.0.0.0:10002 0.0.0.0:* LISTEN 31206/java
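The two ports above belong to the HiveServer2 process (PID 31206). The metastore process (PID 31092) serves Thrift on port 9083 by default (assuming the default metastore port configuration) and can be checked the same way; the command below should show it listening on 0.0.0.0:9083:
[root@hadoop-01 bin]# netstat -nlp |grep 31092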
#### Open the web UI at http://192.168.33.01:10002/hiveserver2.jsp
HiveServer2 listens on port 10000 (hive.server2.thrift.port, the Thrift/JDBC port) and port 10002 (hive.server2.webui.port, the web UI port).
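Both defaults can be overridden in hive-site.xml before starting HiveServer2; a minimal sketch, showing the two properties with their default values:
<property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
</property>
<property>
    <name>hive.server2.webui.port</name>
    <value>10002</value>
</property>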
2. Test with Beeline: connect remotely to HiveServer2 (reference: http://blog.csdn.net/huanggang028/article/details/44591663)
Beeline works in two modes: embedded and remote. In embedded mode it starts an embedded Hive instance (similar to the Hive CLI).
In remote mode it communicates with a separate HiveServer2 process over the Thrift protocol.
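As a quick illustration of the two modes (embedded mode needs no running HiveServer2; the remote address matches the server started above):
embedded mode:  beeline> !connect jdbc:hive2://
remote mode:    beeline> !connect jdbc:hive2://192.168.33.01:10000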
[root@hadoop-01 bin]# ./beeline
Beeline version 2.0.0 by Apache Hive
beeline> !connect jdbc:hive2://192.168.33.01:10000 root root
Connecting to jdbc:hive2://192.168.33.01:10000
Error: Failed to open new session: java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): User: root is not allowed to impersonate root (state=,code=0)
### Error: User: root is not allowed to impersonate root
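This error comes from Hadoop rather than Hive: with hive.server2.enable.doAs=true (the default), HiveServer2 tries to execute the session as the connecting user, and HDFS refuses because root has not been configured as a proxy user. The core-site.xml change in step 3 below is one fix; the other common option, sketched here, is to disable impersonation in hive-site.xml so queries run as the user who started HiveServer2:
<property>
    <name>hive.server2.enable.doAs</name>
    <value>false</value>
</property>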
3. Stop the cluster and Hive, modify the configuration, sync it to the other nodes, then restart the cluster and Hive
[root@hadoop-01 sbin]# ./stop-all.sh
[root@hadoop-01 sbin]# ps -ef|grep hive
root 31092 21892 11 21:57 pts/0 00:00:15 /usr/java/jdk1.7.0_67-cloudera/bin/java -Xmx256m -Djava.library.path=/hadoop/hadoop-2.7.2/lib -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/hadoop/hadoop-2.7.2/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/hadoop/hadoop-2.7.2 -Dhadoop.id.str=root -Dhadoop.root.logger=INFO,console -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Xmx512m -Dlog4j.configurationFile=hive-log4j2.properties -Dhadoop.security.logger=INFO,NullAppender org.apache.hadoop.util.RunJar /hadoop/hive-remote-server/lib/hive-service-2.0.0.jar org.apache.hadoop.hive.metastore.HiveMetaStore
root 31206 21892 15 21:57 pts/0 00:00:21 /usr/java/jdk1.7.0_67-cloudera/bin/java -Xmx256m -Djava.library.path=/hadoop/hadoop-2.7.2/lib -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/hadoop/hadoop-2.7.2/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/hadoop/hadoop-2.7.2 -Dhadoop.id.str=root -Dhadoop.root.logger=INFO,console -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Xmx512m -Dlog4j.configurationFile=hive-log4j2.properties -Dhadoop.security.logger=INFO,NullAppender org.apache.hadoop.util.RunJar /hadoop/hive-remote-server/lib/hive-service-2.0.0.jar org.apache.hive.service.server.HiveServer2
[root@hadoop-01 sbin]# kill -9 31092
[root@hadoop-01 sbin]# kill -9 31206
[root@hadoop-01 sbin]# cd ../etc/hadoop/
[root@hadoop-01 hadoop]# vi core-site.xml
<!-- allow user "root" to impersonate any user from any host -->
<property>
    <name>hadoop.proxyuser.root.hosts</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.root.groups</name>
    <value>*</value>
</property>
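A full restart is the conservative route taken below; on a running cluster the proxy-user settings can usually also be picked up without restarting by refreshing them on the NameNode and ResourceManager once the file has been synced to those nodes (sketch):
[root@hadoop-01 hadoop]# hdfs dfsadmin -refreshSuperUserGroupsConfiguration
[root@hadoop-01 hadoop]# yarn rmadmin -refreshSuperUserGroupsConfiguration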
[root@hadoop-01 hadoop]# scp core-site.xml root@hadoop-02:/hadoop/hadoop-2.7.2/etc/hadoop/
core-site.xml 100% 1779 1.7KB/s 00:00
[root@hadoop-01 hadoop]# scp core-site.xml root@hadoop-03:/hadoop/hadoop-2.7.2/etc/hadoop/
core-site.xml 100% 1779 1.7KB/s 00:00
[root@hadoop-01 hadoop]# scp core-site.xml root@hadoop-04:/hadoop/hadoop-2.7.2/etc/hadoop/
core-site.xml 100% 1779 1.7KB/s 00:00
[root@hadoop-01 hadoop]# scp core-site.xml root@hadoop-05:/hadoop/hadoop-2.7.2/etc/hadoop/
core-site.xml 100% 1779 1.7KB/s 00:00
[root@hadoop-01 hadoop]# cd ../../sbin
[root@hadoop-01 sbin]# ./start-all.sh
[root@hadoop-01 sbin]# cd /hadoop/hive-remote-server/bin
[root@hadoop-01 bin]# hive --service metastore &
[root@hadoop-01 bin]# hive --service hiveserver2 &
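Starting the services with a bare & ties them to the current terminal; a common variant is to detach them and send their output to log files (the log paths here are just examples):
[root@hadoop-01 bin]# nohup hive --service metastore > /tmp/metastore.log 2>&1 &
[root@hadoop-01 bin]# nohup hive --service hiveserver2 > /tmp/hiveserver2.log 2>&1 &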
4. Retest with Beeline
[root@hadoop-01 bin]# ./beeline
Beeline version 2.0.0 by Apache Hive
beeline> !connect jdbc:hive2://192.168.33.01:10000 root root
Connecting to jdbc:hive2://192.168.33.01:10000
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/hadoop/hive-remote-server/lib/hive-jdbc-2.0.0-standalone.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/hadoop/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
Connected to: Apache Hive (version 2.0.0)
Driver: Hive JDBC (version 2.0.0)
16/06/08 22:32:56 [main]: WARN jdbc.HiveConnection: Request to set autoCommit to false; Hive does not support autoCommit=false.
Transaction isolation: TRANSACTION_REPEATABLE_READ
0: jdbc:hive2://192.168.33.01:10000> show tables;
INFO : Compiling command(queryId=root_20160608223313_3c8f0f43-c860-4127-8962-109e751ea306): show tables
INFO : Semantic Analysis Completed
INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:tab_name, type:string, comment:from deserializer)], properties:null)
INFO : Completed compiling command(queryId=root_20160608223313_3c8f0f43-c860-4127-8962-109e751ea306); Time taken: 1.579 seconds
INFO : Concurrency mode is disabled, not creating a lock manager
INFO : Executing command(queryId=root_20160608223313_3c8f0f43-c860-4127-8962-109e751ea306): show tables
INFO : Starting task [Stage-0:DDL] in serial mode
INFO : Completed executing command(queryId=root_20160608223313_3c8f0f43-c860-4127-8962-109e751ea306); Time taken: 0.075 seconds
INFO : OK
+--------------+--+
| tab_name |
+--------------+--+
| studentinfo |
+--------------+--+
1 row selected (2.06 seconds)
0: jdbc:hive2://192.168.33.01:10000> select * from studentinfo;
INFO : Compiling command(queryId=root_20160608223330_c11ea86f-4c91-49bc-924e-ce6f70c0884e): select * from studentinfo
INFO : Semantic Analysis Completed
INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:studentinfo.id, type:int, comment:null), FieldSchema(name:studentinfo.name, type:string, comment:null), FieldSchema(name:studentinfo.age, type:int, comment:null), FieldSchema(name:studentinfo.tel, type:string, comment:null)], properties:null)
INFO : Completed compiling command(queryId=root_20160608223330_c11ea86f-4c91-49bc-924e-ce6f70c0884e); Time taken: 2.276 seconds
INFO : Concurrency mode is disabled, not creating a lock manager
INFO : Executing command(queryId=root_20160608223330_c11ea86f-4c91-49bc-924e-ce6f70c0884e): select * from studentinfo
INFO : Completed executing command(queryId=root_20160608223330_c11ea86f-4c91-49bc-924e-ce6f70c0884e); Time taken: 0.001 seconds
INFO : OK
+-----------------+-------------------+------------------+------------------+--+
| studentinfo.id | studentinfo.name | studentinfo.age | studentinfo.tel |
+-----------------+-------------------+------------------+------------------+--+
| 1 | a | 26 | 113 |
| 2 | b | 11 | 222 |
+-----------------+-------------------+------------------+------------------+--+
2 rows selected (2.741 seconds)
0: jdbc:hive2://192.168.33.01:10000>
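Inside the interactive session, !quit closes the connection and exits. For scripted checks, the same connection and query can also be run non-interactively in a single command (credentials as used above):
[root@hadoop-01 bin]# ./beeline -u jdbc:hive2://192.168.33.01:10000 -n root -p root -e "show tables;"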