一、安装
1、下载安装包
wget http://gethue.com/downloads/releases/4.0.1/hue-4.0.1.tgz
2、安装依赖
yum install ant asciidoc cyrus-sasl-devel cyrus-sasl-gssapi cyrus-sasl-plain gcc gcc-c++ krb5-devel libffi-devel libxml2-devel libxslt-devel make mysql-devel openldap-devel python-devel sqlite-devel gmp-devel openssl-devel -y
3、解压编译
tar zxf hue-4.0.1.tgz -C /data/
cd /data/hue-4.0.1/
make apps
4、创建数据库
create database hue DEFAULT CHARSET utf8 COLLATE utf8_general_ci;
grant all on hue.* to root@'%' identified by '123abcABC';
5、修改配置文件
vim desktop/conf/hue.ini
secret_key=xiaoqiangqianmingshejideCSDNboke
http_host=0.0.0.0
http_port=8888
server_user=hue
server_group=hue
default_user=hue
default_hdfs_superuser=hue
#数据库配置
engine=mysql
host=192.168.137.128
port=3306
user=root
password=123abcABC
name=hue
6、初始化数据库
cd /data/hue-4.0.1/build/env/bin
./hue syncdb
./hue migrate
7、启动hue
useradd hue
chown -R hue:hue hue-4.0.1/
cd /data/hue-4.0.1/build/env/bin
nohup ./supervisor &
登录时设置任意用户及任意密码,不过密码复杂度有要求。
二、hue集成hadoop
1、配置HDFS
cd /data/hadoop/hadoop-2.7.2/etc/hadoop/
vim hdfs-site.xml
<property>
<name>dfs.webhdfs.enabled</name>
<value>true</value>
</property>
<!--dfs.permissions.enabled 属于 HDFS 配置,应放在 hdfs-site.xml 中-->
<property>
<name>dfs.permissions.enabled</name>
<value>false</value>
</property>
vim core-site.xml
<property>
<name>hadoop.proxyuser.hue.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.hue.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.root.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.root.groups</name>
<value>*</value>
</property>
vim yarn-site.xml
<!--打开HDFS上日志记录功能-->
<property>
<name>yarn.log-aggregation-enable</name>
<value>true</value>
</property>
<!--在HDFS上聚合的日志最长保留多少秒。3天-->
<property>
<name>yarn.log-aggregation.retain-seconds</name>
<value>259200</value>
</property>
vim httpfs-site.xml
<property>
<name>httpfs.proxyuser.hue.hosts</name>
<value>*</value>
</property>
<property>
<name>httpfs.proxyuser.hue.groups</name>
<value>*</value>
</property>
2、配置hue
cd /data/hue-4.0.1/desktop/conf/
vim hue.ini
#集成hdfs
[hadoop]
fs_defaultfs=hdfs://cluster1
webhdfs_url=http://node1:14000/webhdfs/v1
hadoop_conf_dir=/data/hadoop/hadoop-2.7.2/etc/hadoop
#集成hive
[beeswax]
hive_server_host=node1
hive_server_port=10000
hive_conf_dir=/data/apache-hive-1.2.1-bin/conf
#集成HBASE
[hbase]
hbase_clusters=(Cluster|node1:9090)
hbase_conf_dir=/data/hbase-1.2.6/conf
#集成zookeeper
[zookeeper]
host_ports=node1:2181,node2:2181,node3:2181
#HBASE开启
./hbase-daemon.sh start thrift
cd /data/hadoop/hadoop-2.7.2/etc/hadoop
vim httpfs-site.xml
<property>
<name>httpfs.proxyuser.hue.hosts</name>
<value>*</value>
</property>
<property>
<name>httpfs.proxyuser.hue.groups</name>
<value>*</value>
</property>
<property>
<name>httpfs.hadoop.config.dir</name>
<value>/data/hadoop/hadoop-2.7.2/etc/hadoop</value>
</property>
#启动httpfs
httpfs.sh start