[root@hadoop004 ~]# yum install -y ant asciidoc cyrus-sasl-devel cyrus-sasl-gssapi cyrus-sasl-plain gcc gcc-c++ krb5-devel libffi-devel libxml2-devel libxslt-devel make mysql mysql-devel openldap-devel python-devel sqlite-devel gmp-devel
[hadoop@hadoop004 ~]$ vim ~/.bash_profile
export HUE_HOME=/home/hadoop/app/hue-3.9.0-cdh5.7.0
export PATH=$HUE_HOME/bin:$PATH
[hadoop@hadoop004 ~]$ source ~/.bash_profile
[hadoop@hadoop004 software]$ tar -zxf hue-3.9.0-cdh5.7.0.tar.gz -C ~/app
[hadoop@hadoop004 app]$ cd hue-3.9.0-cdh5.7.0/
[hadoop@hadoop004 hue-3.9.0-cdh5.7.0]$ ls
apps desktop ext Makefile Makefile.vars maven README VERSION
cloudera docs LICENSE.txt Makefile.sdk Makefile.vars.priv NOTICE.txt tools
[hadoop@hadoop004 hue-3.9.0-cdh5.7.0]$ make apps
出现下面这些输出则表示编译成功了
Post-processed 'useradmin/art/icon_useradmin_48.png' as 'useradmin/art/icon_useradmin_48.f78e72adfb01.png'
Post-processed 'useradmin/art/icon_useradmin_24.png' as 'useradmin/art/icon_useradmin_24.50e5ffb9f1c9.png'
Post-processed 'useradmin/art/useradmin-logo.png' as 'useradmin/art/useradmin-logo.e691a8a0fe4e.png'
Post-processed 'zookeeper/help/index.html' as 'zookeeper/help/index.7570dbb625f3.html'
Post-processed 'zookeeper/js/base64.js' as 'zookeeper/js/base64.ce5e02af31e5.js'
Post-processed 'zookeeper/css/zookeeper.css' as 'zookeeper/css/zookeeper.dab3cbab10bb.css'
Post-processed 'zookeeper/art/icon_zookeeper_24.png' as 'zookeeper/art/icon_zookeeper_24.e3168d30a559.png'
Post-processed 'zookeeper/art/line_icons.png' as 'zookeeper/art/line_icons.f50a9ca444bf.png'
Post-processed 'zookeeper/art/icon_zookeeper_48.png' as 'zookeeper/art/icon_zookeeper_48.21fa694512f1.png'
1190 static files copied to '/home/hadoop/app/hue-3.9.0-cdh5.7.0/build/static', 1190 post-processed.
make[1]: Leaving directory `/home/hadoop/app/hue-3.9.0-cdh5.7.0/apps'
[hadoop@hadoop004 hue-3.9.0-cdh5.7.0]$ vim desktop/conf/hue.ini
http_host=hadoop004
time_zone=Asia/Shanghai
webhdfs_url=http://hadoop004:50070/webhdfs/v1
[beeswax]
# Host where HiveServer2 is running.
# If Kerberos security is enabled, use fully-qualified domain name (FQDN).
## hive_server_host=localhost
hive_server_host=hadoop004
# Port where HiveServer2 Thrift server runs on.
## hive_server_port=10000
hive_server_port=10000
# Hive configuration directory, where hive-site.xml is located
## hive_conf_dir=/etc/hive/conf
hive_conf_dir=/home/hadoop/app/hive-1.1.0-cdh5.7.0/conf/
[hadoop@hadoop004 hadoop]$ vim hdfs-site.xml
<property>
<name>dfs.webhdfs.enabled</name>
<value>true</value>
</property>
[hadoop@hadoop004 hadoop]$ vim core-site.xml
<!-- Hue -->
<property>
<name>hadoop.proxyuser.hue.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hue.groups</name>
<value>*</value>
</property>
</configuration>
[hadoop@hadoop004 hadoop]$ vim httpfs-site.xml
<property>
<name>httpfs.proxyuser.hue.hosts</name>
<value>*</value>
</property>
<property>
<name>httpfs.proxyuser.hue.groups</name>
<value>*</value>
</property>
[hadoop@hadoop004 conf]$ vim hive-site.xml
<!-- Hue -->
<property>
<name>hive.server2.thrift.port</name>
<value>10000</value>
</property>
<property>
<name>hive.server2.thrift.bind.host</name>
<value>hadoop004</value>
</property>
<property>
<name>hive.server2.long.polling.timeout</name>
<value>5000</value>
</property>
<property>
<name>hive.server2.authentication</name>
<value>NONE</value>
</property>
[hadoop@hadoop004 conf]$ jps
22487 ResourceManager
26595 Jps
22045 NameNode
22171 DataNode
22587 NodeManager
22337 SecondaryNameNode
[hadoop@hadoop004 conf]$ nohup hive --service hiveserver2 >~/app/hive-1.1.0-cdh5.7.0/console.log 2>&1 &
[1] 26607
[hadoop@hadoop004 conf]$ beeline -u jdbc:hive2://hadoop004:10000/default -n hadoop
which: no hbase in (/home/hadoop/app/hue-3.9.0-cdh5.7.0/bin:/home/hadoop/app/hive-1.1.0-cdh5.7.0/bin:/home/hadoop/app/apache-maven-3.3.9/bin:/home/hadoop/app/hadoop-2.6.0-cdh5.7.0/bin:/usr/java/jdk1.7.0_45/bin:/home/hadoop/app/hive-1.1.0-cdh5.7.0/bin:/home/hadoop/app/apache-maven-3.3.9/bin:/home/hadoop/app/hadoop-2.6.0-cdh5.7.0/bin:/usr/java/jdk1.7.0_45/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin)
scan complete in 3ms
Connecting to jdbc:hive2://hadoop004:10000/default
Connected to: Apache Hive (version 1.1.0-cdh5.7.0)
Driver: Hive JDBC (version 1.1.0-cdh5.7.0)
Transaction isolation: TRANSACTION_REPEATABLE_READ
Beeline version 1.1.0-cdh5.7.0 by Apache Hive
0: jdbc:hive2://hadoop004:10000/default>
连接时发生异常,信息如下:Error: Could not open client transport with JDBC Uri: jdbc:hive2://hadoop004:10000: null
解决方案:检查发现是配置了Hive的用户登录认证导致的,将hive.server2.authentication配置项的值改为NONE即可(原先是NOSASL)
[hadoop@hadoop004 ~]$ cd $HUE_HOME
[hadoop@hadoop004 hue-3.9.0-cdh5.7.0]$ build/env/bin/supervisor
[INFO] Not running as root, skipping privilege drop
starting server with options:
{'daemonize': False,
'host': 'hadoop004',
'pidfile': None,
'port': 8888,
'server_group': 'hue',
'server_name': 'localhost',
'server_user': 'hue',
'ssl_certificate': None,
'ssl_certificate_chain': None,
'ssl_cipher_list': 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA',
'ssl_private_key': None,
'threads': 40,
'workdir': None}