# 1. 检测文件某行中是否有指定内容 (check that a specific line exists in a file)
# Verify spark-env.sh exports HADOOP_CONF_DIR=/etc/hadoop on Spark hosts.
- name: spark-on-yarn state
  # sed prints the matching line (or nothing) and exits 0 either way,
  # so correctness is enforced by failed_when below, not the exit code.
  shell: "sed -n '/^export HADOOP_CONF_DIR=\\/etc\\/hadoop/p' {{ SPARK_CONF }}/spark-env.sh"
  register: first_line_content
  when: inventory_hostname in groups['spark_server']
  # Fail unless the expected export line is present verbatim.
  failed_when: "first_line_content.stdout != 'export HADOOP_CONF_DIR=/etc/hadoop'"
  # Read-only inspection: never report "changed".
  changed_when: false
  tags:
    - inspect_spark_server
# 2. 执行 hdfs 命令, 获取 nn 状态 (run hdfs commands to fetch NameNode state)
# Obtain a Kerberos TGT for this host's hdfs service principal before
# running any hdfs commands below.
- name: kinit hdfs
  shell: >-
    kinit -kt {{ BASE_DIR }}/keytab/hdfs.keytab
    hdfs/{{ hostvars[inventory_hostname]['HOSTNAME'] | lower }}@{{ KERBEROS_REALAMS }}
  become: yes
  become_user: hdfs
  when: ENABLE_KERBEROS
  tags:
    - inspect_nn
# Query the HA state (active/standby) of each NameNode service id this
# host belongs to (groups named nn1, nn2, ...).
- name: get service state for nn
  shell: "{{ BASE_DIR }}/hadoop/bin/hdfs haadmin -getServiceState {{ item }}"
  become: yes
  become_user: hdfs
  register: nn_state_output
  # NOTE: the regex 'nn*' means "n followed by zero or more n" and would
  # match ANY group starting with 'n'; anchor it to nn<digits> instead.
  with_items: "{{ group_names | select('match', '^nn[0-9]+$') | list }}"
  # Empty output means the haadmin query produced nothing useful.
  failed_when: nn_state_output.stdout == ""
  # Read-only query: never report "changed".
  changed_when: false
  tags:
    - inspect_nn
# Show the raw haadmin output captured for every NameNode queried above.
- name: display nn state
  debug:
    msg: "{{ item.stdout }}"
  loop: "{{ nn_state_output.results }}"
  tags:
    - inspect_nn
# Print each nn's state via json_query; requires `pip install jmespath`
# on the controller.
- name: display nn state (json_query)
  debug:
    # json_query('[].stdout') already projects each result down to its
    # stdout string, so the loop variable IS the string; `item.stdout`
    # would be undefined here.
    var: item
  with_items: "{{ nn_state_output.results | json_query('[].stdout') }}"
  tags:
    - inspect_nn
# 3. 删除 hdfs 的 fsimage.csv 文件 (delete fsimage.csv files from HDFS)
# Obtain a Kerberos TGT as hdfs, on the first nn1 host only, before
# manipulating HDFS contents.
- name: kinit hdfs
  shell: |
    source /etc/profile
    kinit -kt {{ BASE_DIR }}/keytab/hdfs.keytab hdfs/{{ HOSTNAME | lower }}@{{ KERBEROS_REALAMS }}
  args:
    # `source` is a bash builtin, not POSIX sh — force bash so the
    # script does not break on systems where /bin/sh is dash.
    executable: /bin/bash
  become: yes
  become_user: hdfs
  when: ENABLE_KERBEROS and groups['nn1'] is defined and inventory_hostname == groups['nn1'][0]
  tags:
    - uninstall_fsimage
# Locate every fsimage.csv under HDFS_FSIMAGE_PATH; tolerate failures
# (e.g. path absent) so the play can continue.
- name: Find fsimage.csv files in hdfs
  shell: |
    source /etc/profile
    hdfs dfs -find "{{ HDFS_FSIMAGE_PATH }}" -name 'fsimage.csv'
  args:
    # `source` is a bash builtin, not POSIX sh — force bash.
    executable: /bin/bash
  register: csv_files
  ignore_errors: true
  # Read-only search: never report "changed".
  changed_when: false
  # NOTE(review): kinit above ran as the hdfs user, but this task runs as
  # the default remote user — confirm the ticket cache is visible here.
  when: ENABLE_KERBEROS and groups['nn1'] is defined and inventory_hostname == groups['nn1'][0]
  tags:
    - uninstall_fsimage
# Remove each fsimage.csv path discovered by the find task.
- name: Delete found fsimage.csv files
  shell: |
    source /etc/profile
    hdfs dfs -rm -r "{{ item.split()[-1] }}"
  args:
    # `source` is a bash builtin, not POSIX sh — force bash.
    executable: /bin/bash
  # default([]) guards against csv_files lacking stdout_lines (e.g. the
  # find task skipped or produced no output).
  with_items: "{{ csv_files.stdout_lines | default([]) }}"
  # NOTE(review): kinit above ran as the hdfs user, but this task runs as
  # the default remote user — confirm the ticket cache is visible here.
  when: ENABLE_KERBEROS and groups['nn1'] is defined and inventory_hostname == groups['nn1'][0]
  tags:
    - uninstall_fsimage