一、 主机规划
| 主机ip | 服务部署 |
| --- | --- |
192.168.88.130 | kdcmaster |
192.168.88.131 | kdcslave |
二、安装过程
192.168.88.130 操作如下
2.1 192.168.88.130 安装
# yum -y install krb5-libs krb5-devel krb5-server krb5-workstation
2.2 192.168.88.130 修改配置文件如下
cat /etc/krb5.conf
#Configuration snippets may be placed in this directory as well
includedir /etc/krb5.conf.d/
[logging]
default = FILE:/var/log/krb5libs.log
kdc = FILE:/var/log/krb5kdc.log
admin_server = FILE:/var/log/kadmind.log
[libdefaults]
dns_lookup_realm = false
ticket_lifetime = 24h
renew_lifetime = 7d
forwardable = true
rdns = false
pkinit_anchors = /etc/pki/tls/certs/ca-bundle.crt
default_realm = HADOOP.COM
default_ccache_name = KEYRING:persistent:%{uid}
[realms]
HADOOP.COM = {
kdc = kdcmaster
admin_server = kdcmaster
}
[domain_realm]
.hadoop.com = HADOOP.COM
hadoop.com = HADOOP.COM
vi /var/kerberos/krb5kdc/kdc.conf
[kdcdefaults]
kdc_ports = 88
kdc_tcp_ports = 88
[realms]
HADOOP.COM = {
acl_file = /var/kerberos/krb5kdc/kadm5.acl
dict_file = /usr/share/dict/words
admin_keytab = /var/kerberos/krb5kdc/kadm5.keytab
supported_enctypes = des3-hmac-sha1:normal arcfour-hmac:normal camellia256-cts:normal camellia128-cts:normal des-hmac-sha1:normal des-cbc-md5:normal des-cbc-crc:normal
}
2.3 192.168.88.130 初始化数据库
[root@kdcmaster ~]# kdb5_util create -s -r HADOOP.COM
Loading random data
Initializing database '/var/kerberos/krb5kdc/principal' for realm 'HADOOP.COM',
master key name 'K/M@HADOOP.COM'
You will be prompted for the database Master Password.
It is important that you NOT FORGET this password.
Enter KDC database master key:
Re-enter KDC database master key to verify:
添加管理员账号
[root@kdcmaster ~]# kadmin.local -q "addprinc admin/admin"
Authenticating as principal root/admin@HADOOP.COM with password.
WARNING: no policy specified for admin/admin@HADOOP.COM; defaulting to no policy
Enter password for principal "admin/admin@HADOOP.COM":
Re-enter password for principal "admin/admin@HADOOP.COM":
Principal "admin/admin@HADOOP.COM" created.
修改kadm5.acl
vi /var/kerberos/krb5kdc/kadm5.acl
*/admin@HADOOP.COM *
在kdcmaster上启动kdc和kadmin
[root@kdcmaster ~]# service krb5kdc start
Redirecting to /bin/systemctl start krb5kdc.service
[root@kdcmaster ~]# service kadmin start
Redirecting to /bin/systemctl start kadmin.service
192.168.88.131 kdcslave上安装kerberos
yum -y install krb5-libs krb5-devel krb5-server krb5-workstation
在kdcmaster上添加host key
[root@kdcmaster ~]# kadmin
Authenticating as principal admin/admin@HADOOP.COM with password.
Password for admin/admin@HADOOP.COM:
kadmin: addprinc -randkey host/kdcmaster
WARNING: no policy specified for host/kdcmaster@HADOOP.COM; defaulting to no policy
Principal "host/kdcmaster@HADOOP.COM" created.
kadmin: addprinc -randkey host/kdcslave
WARNING: no policy specified for host/kdcslave@HADOOP.COM; defaulting to no policy
Principal "host/kdcslave@HADOOP.COM" created.
生成host keytab
kadmin: ktadd host/kdcmaster
Entry for principal host/kdcmaster with kvno 2, encryption type des3-cbc-sha1 added to keytab FILE:/etc/krb5.keytab.
Entry for principal host/kdcmaster with kvno 2, encryption type arcfour-hmac added to keytab FILE:/etc/krb5.keytab.
Entry for principal host/kdcmaster with kvno 2, encryption type camellia256-cts-cmac added to keytab FILE:/etc/krb5.keytab.
Entry for principal host/kdcmaster with kvno 2, encryption type camellia128-cts-cmac added to keytab FILE:/etc/krb5.keytab.
Entry for principal host/kdcmaster with kvno 2, encryption type des-hmac-sha1 added to keytab FILE:/etc/krb5.keytab.
Entry for principal host/kdcmaster with kvno 2, encryption type des-cbc-md5 added to keytab FILE:/etc/krb5.keytab.
kadmin: ktadd -k /tmp/kerberos-1.keytab host/kdcslave
Entry for principal host/kdcslave with kvno 2, encryption type des3-cbc-sha1 added to keytab WRFILE:/tmp/kerberos-1.keytab.
Entry for principal host/kdcslave with kvno 2, encryption type arcfour-hmac added to keytab WRFILE:/tmp/kerberos-1.keytab.
Entry for principal host/kdcslave with kvno 2, encryption type camellia256-cts-cmac added to keytab WRFILE:/tmp/kerberos-1.keytab.
Entry for principal host/kdcslave with kvno 2, encryption type camellia128-cts-cmac added to keytab WRFILE:/tmp/kerberos-1.keytab.
Entry for principal host/kdcslave with kvno 2, encryption type des-hmac-sha1 added to keytab WRFILE:/tmp/kerberos-1.keytab.
Entry for principal host/kdcslave with kvno 2, encryption type des-cbc-md5 added to keytab WRFILE:/tmp/kerberos-1.keytab.
将/tmp/kerberos-1.keytab复制到kdcslave的/etc目录下,并命名为krb5.keytab
[root@kdcmaster ~]# scp /tmp/kerberos-1.keytab root@kdcslave:/etc/krb5.keytab
The authenticity of host 'kdcslave (192.168.88.131)' can't be established.
ECDSA key fingerprint is SHA256:mXyA1uwn8huNuzL3LPZMl1YU0lpoqKP093F88zWRONI.
ECDSA key fingerprint is MD5:f5:01:60:29:98:bb:b7:18:1b:a1:f2:4b:b5:20:37:4e.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'kdcslave,192.168.88.131' (ECDSA) to the list of known hosts.
root@kdcslave's password:
kerberos-1.keytab
修改kdcmaster上的/etc/krb5.conf,添加kdc条目
HADOOP.COM = {
kdc = kdcmaster
kdc = kdcslave
admin_server = kdcmaster
}
将kdcmaster的如下文件复制到kdcslave对应目录下
scp /etc/krb5.conf root@kdcslave:/etc/
scp /var/kerberos/krb5kdc/kdc.conf root@kdcslave:/var/kerberos/krb5kdc/
scp /var/kerberos/krb5kdc/kadm5.acl root@kdcslave:/var/kerberos/krb5kdc/
scp /var/kerberos/krb5kdc/.k5.HADOOP.COM root@kdcslave:/var/kerberos/krb5kdc/
在所有节点上创建
vi /var/kerberos/krb5kdc/kpropd.acl
host/kdcmaster@HADOOP.COM
host/kdcslave@HADOOP.COM
在kdcslave上启动kpropd
[root@kdcslave ~]# kpropd -dS
ready
waiting for a kprop connection
在kdcmaster上导出数据库,并同步到kdcslave
[root@kdcmaster ~]# kdb5_util dump /var/kerberos/krb5kdc/slave_datatrans
[root@kdcmaster ~]# kprop -f /var/kerberos/krb5kdc/slave_datatrans kdcslave
Database propagation to kdcslave: SUCCEEDED
kdcslave 上日志
[root@kdcslave ~]# kpropd -dS
ready
waiting for a kprop connection
Connection from kdcmaster
krb5_recvauth(4, kprop5_01, host/kdcslave@HADOOP.COM, ...)
authenticated client: host/kdcmaster@HADOOP.COM (etype == Triple DES cbc mode with HMAC/sha1)
Full propagation transfer started.
Full propagation transfer finished.
calling kdb5_util to load database
Load PID is 3565
Database load process for full propagation completed.
waiting for a kprop connection
此时启动kdcslave节点上的kdc,看看数据是否同步过来了
[root@kdcslave krb5kdc]# kadmin.local
Authenticating as principal root/admin@HADOOP.COM with password.
kadmin.local: list_principals
K/M@HADOOP.COM
admin/admin@HADOOP.COM
host/kdcmaster@HADOOP.COM
host/kdcslave@HADOOP.COM
kadmin/admin@HADOOP.COM
kadmin/changepw@HADOOP.COM
kadmin/kdcmaster.lan@HADOOP.COM
kiprop/kdcmaster.lan@HADOOP.COM
krbtgt/HADOOP.COM@HADOOP.COM
可以看见数据已经同步了,现在要做的就是写个脚本定时同步数据库
vi /root/sync_db.sh
#!/usr/bin/env bash
# Dump the KDC principal database on the master and propagate it to every
# slave KDC listed in $kdclist. Intended to run from root's crontab on
# kdcmaster; each slave must already be running kpropd.
set -euo pipefail

# Space-separated list of slave KDC hostnames to propagate to.
kdclist="kdcslave"
readonly dumpfile="/var/kerberos/krb5kdc/slave_datatrans"

echo "$(date) start to sync!"

# Export the principal database to the dump file.
sudo kdb5_util dump "$dumpfile"

# $kdclist is intentionally unquoted: word-splitting yields one host per word.
for kdc in $kdclist; do
  sudo kprop -f "$dumpfile" "$kdc"
done

echo "$(date) end to sync!"
添加执行权限
chmod +x sync_db.sh
添加定时任务
crontab -e
*/1 * * * * /root/sync_db.sh >> /root/sync.log
测试,在kdcmaster添加用户usertest1
[root@kdcmaster ~]# kadmin.local
Authenticating as principal admin/admin@HADOOP.COM with password.
kadmin.local: add
addpol add_policy addprinc add_principal
kadmin.local: addprinc usertest1
WARNING: no policy specified for usertest1@HADOOP.COM; defaulting to no policy
Enter password for principal "usertest1@HADOOP.COM":
Re-enter password for principal "usertest1@HADOOP.COM":
Principal "usertest1@HADOOP.COM" created.
在kdcslave上查看,是否同步
[root@kdcslave krb5kdc]# kadmin.local
Authenticating as principal root/admin@HADOOP.COM with password.
kadmin.local: list_principals
K/M@HADOOP.COM
admin/admin@HADOOP.COM
host/kdcmaster@HADOOP.COM
host/kdcslave@HADOOP.COM
kadmin/admin@HADOOP.COM
kadmin/changepw@HADOOP.COM
kadmin/kdcmaster.lan@HADOOP.COM
kiprop/kdcmaster.lan@HADOOP.COM
krbtgt/HADOOP.COM@HADOOP.COM
usertest1@HADOOP.COM
数据同步已经完成
参考
在kdcmaster上快速安装kerberos,可参考https://blog.csdn.net/woloqun/article/details/76560173
https://web.mit.edu/kerberos/krb5-latest/doc/admin/install_kdc.html#install-the-replica-kdcs
https://docs.oracle.com/cd/E26926_01/html/E25889/seamtm-1.html#scrolltoc