Airflow1.10.11 之 dag 的 SSHOperator

1, 准备脚本

[root@do-airflow ~]# vi test.script.sh
#!/bin/bash
# Write 10 timestamped lines (one per second) to /root/<day>.<name>.<step>.log
# and echo each line to stdout.
#
# Usage: test.script.sh <name> <step 1|2|3> [day YYYYMMDD]
#   name - tag embedded in the log file name ($1)
#   step - 1, 2 or 3; anything else is a no-op and the script exits 0
#   day  - optional date stamp; defaults to today when omitted

# Date stamp used in the log file name; default to today (YYYYMMDD).
S_DAY=${3:-}
if [[ -z "$S_DAY" ]]; then
	S_DAY=$(date '+%Y%m%d')
fi

# Resolve the target log file from the step number. The three original
# branches differed only in the step digit, so they collapse into one.
case "${2:-}" in
"1" | "2" | "3")
	S_FILE="/root/$S_DAY.${1:-}.$2.log"
	;;
*)
	# Unknown or missing step: nothing to do (matches the original bare exit).
	exit 0
	;;
esac

# Start from a clean file for this run.
rm -f -- "$S_FILE"

# Emit 10 timestamped lines, sleeping 1s between lines (9 sleeps total,
# same as the original break-before-sleep loop).
I=0
while (( I < 10 )); do
	S_MSG=$(date '+%Y-%m-%d %H:%M:%S')
	echo "$S_MSG"
	echo "$S_MSG" >> "$S_FILE"
	I=$((I + 1))
	if (( I < 10 )); then
		sleep 1
	fi
done

2, 配置 connection

命令行方式

# 添加
[root@do-airflow ~]# airflow connections -a \
--conn_id ssh.192.168.109.131 \
--conn_type SSH \
--conn_host 192.168.109.131 \
--conn_login root \
--conn_password <你的密码> \
--conn_port 22

Successfully added `conn_id`=ssh.192.168.109.131 : SSH://root:******@192.168.109.131:22
[root@do-airflow ~]# 

# 显示
[root@do-airflow ~]# airflow connections -l
╒═══════════════════════╤═════════════╤═══════════════════╤════════╤════════════════╤══════════════════════╤═════════╕
│ Conn Id               │ Conn Type   │ Host              │   Port │ Is Encrypted   │ Is Extra Encrypted   │ Extra   │
╞═══════════════════════╪═════════════╪═══════════════════╪════════╪════════════════╪══════════════════════╪═════════╡
│ 'ssh.192.168.109.131' │ 'SSH'       │ '192.168.109.131' │     22 │ True           │ False                │ None    │
╘═══════════════════════╧═════════════╧═══════════════════╧════════╧════════════════╧══════════════════════╧═════════╛
[root@do-airflow ~]# 

# 删除
[root@do-airflow ~]# airflow connections -d --conn_id ssh.192.168.109.131

Successfully deleted `conn_id`=ssh.192.168.109.131
[root@do-airflow ~]# 

Web UI 方式
(截图:Web UI 中 Admin → Connections 页面的 SSH 连接配置界面)

3, 准备 dag

# 安装依赖包
[root@do-airflow ~]# pip3 install -i https://pypi.tuna.tsinghua.edu.cn/simple paramiko sshtunnel
[root@do-airflow ~]# 

[root@do-airflow ~]# vi /opt/airflow/dags/d_hello.py
import airflow
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.contrib.operators.ssh_operator import SSHOperator
from datetime import timedelta

# Defaults applied to every task created under this DAG.
default_args = dict(
		owner='dosrain',
		depends_on_past=False,
		start_date=airflow.utils.dates.days_ago(2),
)

# The DAG itself: no schedule_interval, so it only runs when triggered
# manually (e.g. via `airflow trigger_dag`).
dag = DAG(
		dag_id='d_hello',
		description='my first DAG',
		default_args=default_args,
		schedule_interval=None,
)
	
# --- Task definitions -------------------------------------------------------
# All five tasks run the same remote script over SSH; only the script tag
# ($1) and step number ($2) differ, so a small factory removes the five-fold
# copy/paste.
#
# The optional 3rd script argument forwards the "sday" value supplied at
# trigger time, e.g.:
#     airflow trigger_dag -c '{"sday":"20200501"}' d_hello
# SSHOperator templates its `command` field, so the Jinja expression below is
# rendered per run; when no conf / "sday" is given it renders to an empty
# string and the remote script falls back to today's date.
_SDAY_TPL = ('{{ dag_run.conf["sday"] '
             'if dag_run and dag_run.conf and "sday" in dag_run.conf '
             'else "" }}')


def _ssh_task(task_id, script, step):
	"""Build an SSHOperator running /root/test.script.sh <script> <step> [sday]."""
	return SSHOperator(
		ssh_conn_id='ssh.192.168.109.131',
		task_id=task_id,
		command='/root/test.script.sh {} {} {}'.format(script, step, _SDAY_TPL),
		dag=dag)


a1_operator = _ssh_task('a1_task', 'a', 1)  # phase-1 aggregation (一期汇聚)
a2_operator = _ssh_task('a2_task', 'a', 2)  # phase-1 load (一期入库)
b1_operator = _ssh_task('b1_task', 'b', 1)  # phase-2 aggregation (二期汇聚)
b2_operator = _ssh_task('b2_task', 'b', 2)  # phase-2 load (二期入库)
c1_operator = _ssh_task('c1_task', 'c', 1)  # Oracle aggregation (Oracle汇聚)

# Dependency graph:  a1 ─► a2 ──────► c1
#                     └─► b1 ─► b2 ───┘
a1_operator >> a2_operator
a1_operator >> b1_operator
b1_operator >> b2_operator
a2_operator >> c1_operator
b2_operator >> c1_operator

[root@do-airflow ~]# python3 /opt/airflow/dags/d_hello.py
[root@do-airflow ~]# airflow list_tasks d_hello
[2020-07-28 10:03:21,524] {__init__.py:50} INFO - Using executor LocalExecutor
[2020-07-28 10:03:21,525] {dagbag.py:396} INFO - Filling up the DagBag from /opt/airflow/dags
a1_task
a2_task
b1_task
b2_task
c1_task
[root@do-airflow ~]#

4, 触发 dag

[root@do-airflow ~]# rm -f *.log

# 启用 d_hello
[root@do-airflow ~]# airflow unpause d_hello
[2020-07-28 10:12:16,131] {__init__.py:50} INFO - Using executor LocalExecutor
[2020-07-28 10:12:16,132] {dagbag.py:396} INFO - Filling up the DagBag from /opt/airflow/dags/d_hello.py
Dag: d_hello, paused: False

# 触发 d_hello,注意,是带参数的
[root@do-airflow ~]# airflow trigger_dag -c '{"sday":"20200501"}' d_hello
[2020-07-28 10:13:12,815] {__init__.py:50} INFO - Using executor LocalExecutor
[2020-07-28 10:13:12,816] {dagbag.py:396} INFO - Filling up the DagBag from /opt/airflow/dags/d_hello.py
Created <DagRun d_hello @ 2020-07-28 10:13:12+08:00: manual__2020-07-28T10:13:12+08:00, externally triggered: True>


# 查看结果文件
[root@do-airflow ~]# ll *.log
-rw-r--r--. 1 root root 200 Jul 28 10:13 20200728.a.1.log
-rw-r--r--. 1 root root 200 Jul 28 10:13 20200728.a.2.log
-rw-r--r--. 1 root root 200 Jul 28 10:13 20200728.b.1.log
-rw-r--r--. 1 root root 200 Jul 28 10:13 20200728.b.2.log
-rw-r--r--. 1 root root 200 Jul 28 10:14 20200728.c.1.log
[root@do-airflow ~]#

评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值