114.Livy配置

  • 环境
    • CDH集群
  • 把Livy包上传至CDH集群的节点
# scp livy.tar.gz ip-xxx-xx-xx-xx.ap-southeast-1.compute.internal:/opt/cloudera/
  • 解压livy.tar.gz包
# cd /opt/cloudera/
# tar -zxvf livy.tar.gz
  • 添加livy用户
# useradd livy -g hadoop
# id livy
  • 将livy用户修改为Livy目录的属主
# chown -R livy:hadoop livy
# ll livy
  • log目录和run目录的创建
# mkdir /var/log/livy
# mkdir /var/run/livy
# chown livy:hadoop /var/log/livy
# chown livy:hadoop /var/run/livy
  • 创建配置文件
# cp livy-env.sh.template livy-env.sh
# cp spark-blacklist.conf.template spark-blacklist.conf
# cp livy.conf.template livy.conf
# chown livy:hadoop livy.conf livy-env.sh spark-blacklist.conf
  • 创建livy服务所需的Kerberos principal及keytab
kadmin.local -q "addprinc -randkey livy/ip-xxx-xx-xx-xx.ap-southeast-1.compute.internal@FAYSON.COM"
kadmin.local -q "addprinc -randkey HTTP/ip-xxx-xx-xx-xx.ap-southeast-1.compute.internal@FAYSON.COM"
kadmin.local -q "xst -k /root/livy.service.keytab livy/ip-xxx-xx-xx-xx.ap-southeast-1.compute.internal@FAYSON.COM"
kadmin.local -q "xst -k /root/spnego.service.keytab HTTP/ip-xxx-xx-xx-xx.ap-southeast-1.compute.internal@FAYSON.COM"
  • 将生成的keytab文件拷贝至Livy所在节点的/etc/security/keytabs目录
# scp livy.service.keytab spnego.service.keytab ip-xxx-xx-xx-xx.ap-southeast-1.compute.internal:/etc/security/keytabs/
  • 将keytab文件的属主修改为livy:hadoop
# cd /etc/security/keytabs/
# chown livy:hadoop livy.service.keytab 
# chown livy:hadoop spnego.service.keytab 
# ll
  • livy.conf的配置文件修改
livy.spark.master = yarn
livy.spark.deployMode = cluster
livy.environment = production
livy.impersonation.enabled = true
livy.server.csrf_protection.enabled = true
livy.server.port = 8998
livy.server.session.timeout = 3600000
livy.server.recovery.mode = recovery
livy.server.recovery.state-store=filesystem
livy.server.recovery.state-store.url=/tmp/livy
livy.server.auth.type = kerberos
livy.server.auth.kerberos.keytab = /etc/security/keytabs/spnego.service.keytab
livy.server.auth.kerberos.principal = HTTP/ip-xxx-xx-xx-xx.ap-southeast-1.compute.internal
livy.server.launch.kerberos.keytab = /etc/security/keytabs/livy.service.keytab
livy.server.launch.kerberos.principal = livy/ip-xxx-xx-xx-xx.ap-southeast-1.compute.internal
  • livy-env.sh增加Hadoop和Spark的配置
export JAVA_HOME=/usr/java/jdk1.7.0_67-cloudera
export HADOOP_HOME=/opt/cloudera/parcels/CDH/lib/hadoop
export SPARK_CONF_DIR=/etc/spark/conf
export SPARK_HOME=/opt/cloudera/parcels/CDH/lib/spark
export HADOOP_CONF_DIR=/etc/hadoop/conf
export LIVY_LOG_DIR=/var/log/livy
export LIVY_PID_DIR=/var/run/livy
export LIVY_SERVER_JAVA_OPTS="-Xmx2g"
  • spark-blacklist.conf修改
# vim spark-blacklist.conf
#
# Configuration override / blacklist. Defines a list of properties that users are not allowed
# to override when starting Spark sessions.
#
# This file takes a list of property names (one per line). Empty lines and lines starting with "#"
# are ignored.
#
# Disallow overriding the master and the deploy mode.
spark.master
spark.submit.deployMode
# Disallow overriding the location of Spark cached jars.
spark.yarn.jar
spark.yarn.jars
spark.yarn.archive
# Don't allow users to override the RSC timeout.
livy.rsc.server.idle-timeout
  • core-site.xml新增配置
<property>
    <name>hadoop.proxyuser.livy.groups</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.livy.hosts</name>
    <value>*</value>
</property>
  • 在HDFS上创建livy的home目录
# kinit -kt /etc/security/keytabs/livy.service.keytab livy/ip-xxx-xx-xx-xx.ap-southeast-1.compute.internal
# hadoop fs -mkdir /user/livy
# hadoop fs -ls /user
  • 启动Livy
# sudo -u livy /opt/cloudera/livy/bin/livy-server start
  • 停止Livy
# sudo -u livy /opt/cloudera/livy/bin/livy-server stop

大数据视频推荐:
CSDN
大数据语音推荐:
ELK7 stack开发运维
企业级大数据技术应用
大数据机器学习案例之推荐系统
自然语言处理
大数据基础
人工智能:深度学习入门到精通

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值