Spark Programming Demo

1. Project structure

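The layout below is inferred from the pom.xml, packages, and resource files in the sections that follow:

ciscdp_integration_add
├── pom.xml
└── src/main
    ├── scala/com/cdp
    │   ├── spark
    │   │   └── CDPCustomerMergeACustomerSpark.scala   (the Prospect / Handover jobs referenced later live here too)
    │   └── util
    │       ├── ParamUtil.scala
    │       ├── Props.scala
    │       └── SparkBase.scala
    └── resources
        ├── application.properties
        └── logic_sql.sql
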
2. pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>ciscdp_integration_add</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <scala.binary>2.11</scala.binary>
        <scala.version>2.11.8</scala.version>
        <spark.version>2.4.0</spark.version>
        <hutool.version>4.6.3</hutool.version>
        <JAR.SUFFIX></JAR.SUFFIX>
    </properties>

    <dependencies>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.25</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.3.7</version>
        </dependency>
        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-all</artifactId>
            <version>${hutool.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_${scala.binary}</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_${scala.binary}</artifactId>
            <version>${spark.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_${scala.binary}</artifactId>
            <version>${spark.version}</version>
            <!--<scope>provided</scope>-->
        </dependency>
<!--
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.26</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.26</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.60</version>
            <scope>compile</scope>
        </dependency>-->
    </dependencies>

    <build>
        <sourceDirectory>src/main/scala</sourceDirectory>
        <plugins>
            <plugin>
                <groupId>org.scala-tools</groupId>
                <artifactId>maven-scala-plugin</artifactId>
                <version>2.15.2</version>
                <executions>
                    <execution>
                        <id>scala-compile-first</id>
                        <phase>process-resources</phase>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <jvmArgs>
                        <jvmArg>-Dfile.encoding=UTF-8</jvmArg>
                        <jvmArg>-Xss256M</jvmArg>
                    </jvmArgs>
                </configuration>
                <dependencies>
                    <dependency>
                        <groupId>org.scala-lang</groupId>
                        <artifactId>scala-library</artifactId>
                        <version>${scala.version}</version>
                    </dependency>
                </dependencies>
            </plugin>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.3</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <filters>
                        <filter>
                            <artifact>*:*</artifact>
                            <excludes>
                                <exclude>META-INF/*.SF</exclude>
                                <exclude>META-INF/*.DSA</exclude>
                                <exclude>META-INF/*.RSA</exclude>
                            </excludes>
                        </filter>
                    </filters>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.6.2</version>
                <configuration>
                    <!-- keep in sync with maven.compiler.source/target above; Spark 2.4 requires Java 8 -->
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>2.6</version>
                <configuration>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>

3. Main class

package com.cdp.spark

import cn.hutool.core.lang.Dict
import cn.hutool.core.util.StrUtil
import com.cdp.util.{ParamUtil, Props, SparkBase}
import org.apache.spark.sql._
import org.slf4j.{Logger, LoggerFactory}

/**
 * @Author kevinwyu@deloitte.com.cn
 * @date 30/07/2021 10:48
 * @description Sets the deleted field of source_dms.a_dms_customer to 1 according to the merge logic (full or incremental)
 *
 */
object CDPCustomerMergeACustomerSpark extends SparkBase {

  override val log: Logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Task entry point
   *
   * @param spark      SparkSession
   * @param dbInfo     (database name, temp table name)
   * @param updateType update type (all or increment)
   * @param dealDate   incremental date
   */
  def task(spark: SparkSession, dbInfo: (String, String), updateType: String, dealDate: String): Unit = {
    preDeal(spark, dbInfo)
    customerMergeUpdate(spark, dbInfo, updateType, dealDate)
    afterDeal(spark, dbInfo)
  }

  /**
   * Pre-processing: initialize the temporary table
   *
   * @param spark  SparkSession
   * @param dbInfo (database name, temp table name)
   */
  def preDeal(spark: SparkSession, dbInfo: (String, String)): Unit = {
    afterDeal(spark, dbInfo) // drop any leftover temp table before re-creating it
    spark.sql(StrUtil.format(Props.getSql("a_dms_customer_temp_create_sql"), Dict.create().set("db", dbInfo._1).set("tempTableName", dbInfo._2)))
    log.info("preDeal success.")
  }

  /**
   * Cleanup: drop the temporary table
   *
   * @param spark  SparkSession
   * @param dbInfo (database name, temp table name)
   */
  def afterDeal(spark: SparkSession, dbInfo: (String, String)): Unit = {
    spark.sql(StrUtil.format(Props.getSql("a_dms_customer_temp_drop_sql"), Dict.create().set("db", dbInfo._1).set("tempTableName", dbInfo._2)))
    log.info("afterDeal success.")
  }

  /**
   * Update logic
   *
   * @param spark      SparkSession
   * @param dbInfo     (database name, temp table name)
   * @param updateType update type (all or increment)
   * @param dealDate   incremental date
   */
  def customerMergeUpdate(spark: SparkSession, dbInfo: (String, String), updateType: String, dealDate: String): Unit = {
    var suffix = ""
    if (updateType.equals("increment")) {
      suffix = s" and b.day=$dealDate "
    }
    val updateSql = StrUtil.format(Props.getSql("update_a_dms_customer_sql_temp") + suffix, Dict.create().set("db", dbInfo._1).set("tempTableName", dbInfo._2))
    val resultSql = StrUtil.format(Props.getSql("update_a_dms_customer_all_sql_result"), Dict.create().set("db", dbInfo._1).set("tempTableName", dbInfo._2))

    // insert into the temporary table
    log.info("update_a_dms_customer_sql_temp:" + updateSql)
    spark.sql(updateSql)
    // write back to the result table
    log.info("update_a_dms_customer_sql_result:" + resultSql)
    spark.sql(resultSql)
    log.info("customerMergeUpdate success.")
  }

  /**
   * Program entry point
   *
   * @param args args
   *             args(0): deploymentEnv: pro or test
   *             args(1): updateType: all or increment
   *             args(2): dealDate: incremental date
   */
  def main(args: Array[String]): Unit = {
    val appName = "CDPCustomerMergeACustomerSpark"
    ParamUtil.checkPram(args, appName)
    val deploymentEnv = args(0)
    val updateType = args(1)
    var dealDate = ""
    if (args.length == 3) {
      dealDate = args(2)
    }
    val spark = getContext(appName)
    val dbInfo = ParamUtil.getDataBaseName(deploymentEnv, appName)
    task(spark, dbInfo, updateType, dealDate)
    spark.stop()
  }
}
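
In production the job is launched via spark-submit (see the Airflow DAG in section 9). For a quick local check, a hypothetical driver like the sketch below could call it directly, assuming .master("local[*]") is uncommented in SparkBase and the test-environment Hive tables are reachable:

package com.cdp.spark

// Hypothetical local smoke test for the job above (not part of the original project).
object CDPCustomerMergeACustomerSmokeTest {
  def main(args: Array[String]): Unit = {
    // deploymentEnv=test, updateType=increment, dealDate=20210801
    CDPCustomerMergeACustomerSpark.main(Array("test", "increment", "20210801"))
  }
}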

4. Parameter utility class

package com.cdp.util

import org.slf4j.{Logger, LoggerFactory}

import java.util.regex.Pattern

/**
 * @Author kevinwyu@deloitte.com.cn
 * @date 30/07/2021 10:48
 * @description Utility class for validating and resolving command-line parameters
 *
 */
object ParamUtil {
  val log: Logger = LoggerFactory.getLogger(this.getClass)
  val devDbName: String = Props.get("dev.hive.db.name")
  val proDbName: String = Props.get("pro.hive.db.name")
  val customerTempTable: String = Props.get("a_dms_customer_temp_table")
  val prospectTempTable: String = Props.get("a_dms_prospect_temp_table")
  val handoverTempTable: String = Props.get("a_dms_handover_temp_table")
  val deploymentEnvList = List("test", "pro")
  val updateTypeList = List("all", "increment")

  /**
   * Resolve the database and temp table names for the target environment
   *
   * @param deploymentEnv deployment environment
   * @param appName       application name
   * @return (database name, temp table name)
   */
  def getDataBaseName(deploymentEnv: String, appName: String): (String, String) = {
    var dbName = ""
    var tempTableName = ""
    if (deploymentEnv.equals("pro")) {
      dbName = proDbName
    } else {
      dbName = devDbName
    }
    if (appName.equals("CDPCustomerMergeACustomerSpark")) {
      tempTableName = customerTempTable
    } else if (appName.equals("CDPCustomerMergeAProspestSpark")) {
      tempTableName = prospectTempTable
    } else if (appName.equals("CDPDistinctAHandoverSpark")) {
      tempTableName = handoverTempTable
    }
    (dbName, tempTableName)
  }

  /**
   * Validate the input params of the a_dms_customer / a_dms_prospect update jobs
   *
   * @param args args
   */
  def checkPram(args: Array[String], appName: String): Unit = {

    if (args.length < 2 || args.length > 3) {
      log.error(" param number is not correct,the logic only support two or three param")
      System.exit(-1)
    }
    if (args.length >= 2) {
      if (!deploymentEnvList.contains(args(0))) {
        log.error("deploymentEnv param must be in (test,pro)")
        System.exit(-1)
      }

      if (!updateTypeList.contains(args(1))) {
        log.error("updateType param must be in (all,increment)")
        System.exit(-1)
      }

      if (args.length == 2 && args(1).equals("increment")) {
        log.error("not all update , must give dealDate,please give the third param, such as 20210801 ")
        System.exit(-1)
      }
      if (args.length == 3) {
        log.info("increment date:" + args(2))
        if (!parseDate(args(2))) {
          log.error("increment date is illegal,should input legal number such as 20210801 ")
          System.exit(-1)
        }
      }
    }
  }

  /**
   * Validate the input params of the a_dms_handover dedup job
   *
   * @param args args
   */
  def checkHandoverParam(args: Array[String]): Unit = {
    if (args.length != 1) {
      log.error(" param number is not correct,the logic only support one param ")
      System.exit(-1)
    }
    if (!deploymentEnvList.contains(args(0))) {
      log.error("deploymentEnv param must be in (test,pro)")
      System.exit(-1)
    }
  }

  /**
   * Validate the date string
   *
   * @param dealDate incremental date
   * @return Boolean
   */
  def parseDate(dealDate: String): Boolean = {
    val el = "((\\d{3}[1-9]|\\d{2}[1-9]\\d|\\d[1-9]\\d{2}|[1-9]\\d{3})(((0[13578]|1[02])(0[1-9]|[12]\\d|3[01]))|((0[469]|11)(0[1-9]|[12]\\d|30))|(02(0[1-9]|[1]\\d|2[0-8]))))|(((\\d{2})(0[48]|[2468][048]|[13579][26])|((0[48]|[2468][048]|[3579][26])00))0229)"
    val pat = Pattern.compile(el)
    val matcher = pat.matcher(dealDate)
    matcher.matches()
  }
}
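
As a quick sketch of how these helpers behave (expected results derived from the date regex and from application.properties in section 7; the demo object itself is hypothetical):

package com.cdp.util

// Hypothetical illustration of ParamUtil (not part of the original project).
object ParamUtilDemo {
  def main(args: Array[String]): Unit = {
    println(ParamUtil.parseDate("20210801"))   // true  - valid yyyyMMdd date
    println(ParamUtil.parseDate("20210231"))   // false - February 31st does not exist
    println(ParamUtil.parseDate("2021-08-01")) // false - only plain digits are accepted
    // -> (int_ads_model,a_dms_customer_temp) in the test environment
    println(ParamUtil.getDataBaseName("test", "CDPCustomerMergeACustomerSpark"))
  }
}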

5. Configuration loader class

package com.cdp.util

import java.util.Properties

/**
 * @Author kevinwyu@deloitte.com.cn
 * @date 30/07/2021 10:48
 * @description Loads values from the configuration files
 *
 */
object Props {
  private val properties = new Properties()
  private val sqlProperties = new Properties()
  properties.load(Thread.currentThread().getContextClassLoader.getResourceAsStream("application.properties"))
  // logic_sql.sql is parsed in java.util.Properties format: each key=value entry is one SQL
  // statement, and the trailing backslashes join multi-line statements into a single value
  sqlProperties.load(Thread.currentThread().getContextClassLoader.getResourceAsStream("logic_sql.sql"))

  /**
   * Get a value from application.properties
   *
   * @param field field
   * @return value
   */
  def get(field: String): String = properties.getProperty(field)

  /**
   * Get a SQL statement from logic_sql.sql
   *
   * @param field field
   * @return value
   */
  def getSql(field: String): String = sqlProperties.getProperty(field)
}
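
To show how the pieces fit together, the sketch below renders one of the SQL templates from section 8 exactly the way the main class does; the demo object is hypothetical, and the expected output assumes the property values shown in sections 7 and 8:

package com.cdp.util

import cn.hutool.core.lang.Dict
import cn.hutool.core.util.StrUtil

// Hypothetical illustration of template rendering (not part of the original project).
object PropsDemo {
  def main(args: Array[String]): Unit = {
    val db  = Props.get("dev.hive.db.name")            // "int_ads_model"
    val tmp = Props.get("a_dms_customer_temp_table")   // "a_dms_customer_temp"
    val sql = StrUtil.format(
      Props.getSql("a_dms_customer_temp_create_sql"),
      Dict.create().set("db", db).set("tempTableName", tmp))
    // create table if not exists int_ads_model.a_dms_customer_temp like int_ads_model.a_dms_customer
    println(sql)
  }
}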

6. SparkSession base class

package com.cdp.util

import org.apache.spark.sql.SparkSession
import org.slf4j.{Logger, LoggerFactory}

/**
 * @Author kevinwyu@deloitte.com.cn
 * @date 30/07/2021 10:48
 * @description Provides the SparkSession
 *
 */
class SparkBase {
  @transient
  protected val log: Logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Build the SparkSession
   *
   * @param name appName
   * @return SparkSession
   */
  def getContext(name: String): SparkSession =
    SparkSession.builder().appName(name)
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.parquet.writeLegacyFormat", "true")
      //.master("local[*]")
      .config("hive.exec.dynamic.partition", true) // 支持 Hive 动态分区
      .config("hive.exec.dynamic.partition.mode", "nonstrict") // 非严格模式
      .config("spark.sql.sources.partitionOverwriteMode", "dynamic")
      .config("spark.sql.adaptive.enabled", true)
      .enableHiveSupport()
      .getOrCreate()

}
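
A minimal sketch of what a new job reusing SparkBase looks like (hypothetical class, for illustration only):

package com.cdp.spark

import com.cdp.util.SparkBase

// Hypothetical skeleton for a new job built on SparkBase (not part of the original project).
object ExampleShowDatabasesSpark extends SparkBase {
  def main(args: Array[String]): Unit = {
    val spark = getContext("ExampleShowDatabasesSpark")
    spark.sql("show databases").show() // quick check that Hive support is enabled
    spark.stop()
  }
}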

7. application.properties


# dev / test database
dev.hive.db.name=int_ads_model
# production database
pro.hive.db.name=source_dms
# temp table names
a_dms_customer_temp_table=a_dms_customer_temp
a_dms_prospect_temp_table=a_dms_prospect_temp
a_dms_handover_temp_table=a_dms_handover_temp

8. logic_sql.sql

-- =========================================================================
-- Project         :ciscdp_integration
-- Filename        :logic_sql.sql
-- Description     :DDL for the temp tables of a_dms_customer, a_dms_prospect and a_dms_handover, plus the update-logic SQL
-- Author          : Yu Wei
-- =========================================================================
--Change Log
-- 2021-08-03: init version - Yu Wei
--
--Create the temporary intermediate tables
a_dms_customer_temp_create_sql=create table if not exists {db}.{tempTableName} like {db}.a_dms_customer
a_dms_prospect_temp_create_sql=create table if not exists {db}.{tempTableName} like {db}.a_dms_prospect
a_dms_handover_temp_create_sql=create table if not exists {db}.{tempTableName} like {db}.a_dms_handover

--Drop the temporary intermediate tables
a_dms_customer_temp_drop_sql=drop table if exists {db}.{tempTableName}
a_dms_prospect_temp_drop_sql=drop table if exists {db}.{tempTableName}
a_dms_handover_temp_drop_sql=drop table if exists {db}.{tempTableName}

--Update data into the a_dms_customer temp table (full or incremental)
update_a_dms_customer_sql_temp=insert overwrite table {db}.{tempTableName} \
select \
a.dealerid \
,a.customerid \
,a.name1 \
,a.name2 \
,a.nationalid \
,a.lastvisit \
,a.phone \
,a.address1 \
,a.address2 \
,a.town \
,a.postcode \
,a.country \
,a.mobilephone \
,a.fax \
,a.subdealer \
,a.email \
,case when b.dealerid is not null then 1 else a.deleted end deleted \
,a.cust_type \
,a.titel \
,a.btwnummer \
,a.banknummer \
,a.klantgroep \
,a.btwc \
,a.kortc \
,a.taal \
,a.aantopd \
,a.warn \
,a.currency_c \
,a.franch \
,a.banka \
,a.dic \
,a.bb_note \
,a.betaal \
,a.telefoon_2 \
,a.custinfo \
,a.mod_date \
,a.mod_user \
,a.cre_date \
,a.cre_user \
,a.religion \
,a.birthday \
,a.pass_disc \
,a.id_number \
,a.to_contact \
,a.education \
,a.profession \
,a.industrie \
,a.hobby1 \
,a.hobby2 \
,a.hobby3 \
,a.sourcep \
,a.membership \
,a.pref_cont \
,a.fax_priv \
,a.email_priv \
,a.upd_reason \
,a.salesman \
,a.custage \
,a.sentflag \
,a.packageno \
,a.partner_typ \
,a.comp_name \
,a.lic_year \
,a.hadres \
,a.companyid \
,a.time_stamp \
,a.gtype \
,a.national_fleet_no \
,a.jobtitle \
,a.mobilephone_clean \
,a.import_id \
,a.data_privacy \
,a.stop_all_comm \
,a.interest_in_whf \
,a.whf_customer \
,a.moduserid \
,a.moduserrole \
,a.creatorid \
,a.creatorrole \
,a.salesmanid \
,a.salesmanrole \
,a.loyaltyid \
,a.nomail \
,a.fday \
from {db}.a_dms_customer a \
left join {db}.dms_customer_merge b \
on a.dealerid = b.dealerid and a.customerid = b.customerid_fr

--Full overwrite of the a_dms_customer result table from the temp table
update_a_dms_customer_all_sql_result=insert overwrite table {db}.a_dms_customer \
select * \
from \
{db}.{tempTableName} a

--Update data into the a_dms_prospect temp table (full or incremental)
update_a_dms_prospect_sql_temp=insert overwrite table {db}.{tempTableName} \
select \
a.oth_age2 \
,a.referral_relation \
,a.birthday \
,a.industrie \
,a.oth_mak1 \
,a.oth_mil1 \
,a.membership \
,a.pros_text \
,a.dic \
,a.time_stamp \
,a.stop_all_comm \
,a.oth_type1 \
,a.cre_user \
,a.sourcep \
,a.oth_age1 \
,a.email \
,a.salesmanrole \
,a.dealerid \
,a.telefoon_2 \
,a.job_title \
,a.intin \
,a.salesman \
,a.qualify \
,a.name2 \
,a.postcode \
,a.mobilephone_clean \
,a.upd_reason \
,a.email_priv \
,a.moduserrole \
,a.hadres_2 \
,a.cre_date \
,a.oth_mak3 \
,a.interest_in_whf \
,a.country \
,a.address2 \
,a.age \
,a.whf_customer \
,a.mobilephone \
,a.mod_date \
,a.companyid \
,a.lic_year \
,a.cust_stat \
,a.hobby2 \
,a.customerid \
,a.pref_col2 \
,a.gtype \
,a.crea_inter \
,a.fax_priv \
,a.custinfo \
,a.oth_mak2 \
,a.creatorrole \
,a.hobby \
,a.fax \
,a.education \
,a.int_purch \
,a.oth_mil2 \
,a.sub_source \
,a.name1 \
,a.partner_typ \
,a.pros_flag \
,a.address1 \
,a.to_contact \
,a.oth_age3 \
,a.act_date \
,a.hobby1 \
,a.national_fleet_no \
,a.profession \
,a.oth_type3 \
,a.pros_source \
,a.pass_disc \
,a.kids \
,a.hobby3 \
,a.pref_col1 \
,a.packageno \
,a.oth_mil3 \
,a.import_id \
,a.cust_type \
,a.salesmanid \
,a.pref_e \
,a.id_number \
,a.oth_type2 \
,a.tempera \
,a.pref_cont \
,a.religion \
,a.town \
,a.moduserid \
,a.comp_name \
,a.cont_info \
,a.klantgroep \
,a.pref_model \
,a.last_contd \
,a.mod_user \
,a.hadres \
,a.creatorid \
,case when b.dealerid is not null then 1 else a.deleted end deleted \
,a.phone \
,a.data_privacy \
,a.sentflag \
,a.titel \
,a.recommender_id \
,a.rep_int1 \
,a.nomail \
,a.goldmineid \
,a.rep_int2 \
,a.rep_int3 \
,a.fday \
from \
{db}.a_dms_prospect a \
left join {db}.dms_customer_merge b on \
a.dealerid = b.dealerid \
and a.customerid = b.customerid_fr

--Full overwrite of the a_dms_prospect result table from the temp table
update_a_dms_prospect_all_sql_result= insert overwrite table {db}.a_dms_prospect \
select * \
from {db}.{tempTableName}

--Deduplicate data into the a_dms_handover temp table (full)
distinct_a_dms_handover_sql_temp=insert overwrite table {db}.{tempTableName} \
select term_service_begins \
,paym \
,uch_warrstartmileage \
,rep_int \
,term_service_expires \
,uch_retailprice \
,ret_price \
,uc_replacemodel \
,time_stamp \
,name2ecallp1 \
,connecteddrive \
,uch_retailinvoicedate \
,extn2 \
,uc_firstreg \
,phoneecallp2 \
,purchase_d \
,relsecallp1 \
,dealerid \
,private_customer_id \
,soldby_dms_id \
,contractno \
,uch_warrenddate \
,uc_samecustom \
,uch_warrendmileage \
,name1ecallp1 \
,klantnr \
,cancel_d \
,eser \
,name2ecallp3 \
,cre_date \
,extn1 \
,phoneecallp3 \
,cd_communication_channel \
,tradeveh \
,itemcode \
,order_no \
,relsecallp2 \
,invoice_no \
,name1ecallp3 \
,cd_home_region \
,id_cert_expiration_d \
,cust_stat \
,id_cert_number \
,chassis \
,int_purch \
,id_cert_type \
,name1ecallp2 \
,soldby \
,pin_cd \
,uc_replacebrand \
,tradein \
,uch_purchaseprice \
,uc_vin \
,relsecallp3 \
,packageno \
,uch_warrstartdate \
,import_id \
,phoneecallp1 \
,name2ecallp2 \
,handover_d \
,new_used \
,vehiclesource \
,vin_17 \
,contract_status \
,uch_stockstartdate \
,uc_mileage \
,uch_firstregdate \
,deleted \
,sentflag \
,loyaltyid \
,loyalty_card_id \
,three_r_party_bank \
,pros_flag \
,fday \
from \
( \
select \
*, \
row_number () over(partition by dealerid \
,klantnr \
,vin_17 \
,order_no \
,handover_d \
order by \
cancel_d desc \
,fday desc )rn \
from \
{db}.a_dms_handover )a \
where \
a.rn = 1

--Full overwrite of the deduplicated a_dms_handover result table
distinct_a_dms_handover_all_sql_result=insert overwrite table {db}.a_dms_handover \
select * \
from {db}.{tempTableName}
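
Tying these templates back to customerMergeUpdate in section 3: for an incremental run the job appends a day filter to the join before rendering, roughly as in this hypothetical sketch:

package com.cdp.util

import cn.hutool.core.lang.Dict
import cn.hutool.core.util.StrUtil

// Hypothetical sketch of the incremental rendering path (mirrors customerMergeUpdate above).
object IncrementalSqlDemo {
  def main(args: Array[String]): Unit = {
    val dealDate = "20210801"
    val suffix = s" and b.day=$dealDate "
    val updateSql = StrUtil.format(
      Props.getSql("update_a_dms_customer_sql_temp") + suffix,
      Dict.create().set("db", "int_ads_model").set("tempTableName", "a_dms_customer_temp"))
    // the rendered SQL now ends with:
    // ... on a.dealerid = b.dealerid and a.customerid = b.customerid_fr and b.day=20210801
    println(updateSql)
  }
}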

9. Airflow DAG script

import sys
from functools import partial
import airflow_utli
# The DAG object; we'll need this to instantiate a DAG
from airflow import DAG
# Operators; we need this to operate!
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator
from airflow.operators.http_operator import SimpleHttpOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.operators.mysql_operator import MySqlOperator
from airflow.utils.dates import days_ago

# =========================================================================
# Project         :ciscdp_integration
# Filename        :test_dms_a_table_update_or_distinct_increment.py
# Description     :Incremental update of a_dms_customer, a_dms_prospect and a_dms_handover in the test environment
# Author          :Yu Wei
# Python Version  :2.7
# =========================================================================
#Change Log
# 2021-08-03: init version - Yu Wei
#
dag = DAG('test_update_or_distinct_source_dms_increment', default_args=airflow_utli.test_default_args, schedule_interval='16 17 * * *', concurrency=8, catchup=False)

FIRST_PRIORITY_TABLE_LIST = ['a_dms_customer', 'a_dms_prospect', 
                             'a_dms_handover']

BASE_DIR = '/data01/users/vi081731/kevy'

SPARK_COMMAND = '''
hostname && \
export HADOOP_USER_NAME='vifk1316' && \
spark-submit \
--master local \
--class {class_name} \
{base_dir}/lib/cdp_integration-1.0-SNAPSHOT.jar {param}
'''

# Description param: 1st param - deployment environment, test or pro
#        2nd param - full or incremental update (all or increment)
#        3rd param - the day partition holding the incremental dms_customer_merge data (e.g. 20210801)
# Remarks: when updating a_dms_customer / a_dms_prospect, the first two params are required, and when the
#          second param is increment the third param is required as well;
#          when updating a_dms_handover, only the first param is given (and required)
def deal_a_table_task(table_name):
    class_name = ''
    param = ''
    if table_name == 'a_dms_customer':
        class_name = 'com.cdp.spark.CDPCustomerMergeACustomerSpark'
        param = "test increment {{ds_nodash}}"
    if table_name == 'a_dms_prospect':
        class_name = 'com.cdp.spark.CDPCustomerMergeAProspestSpark'
        param = "test increment {{ds_nodash}}"
    if table_name == 'a_dms_handover':
        class_name = 'com.cdp.spark.CDPDistinctAHandoverSpark'
        param = "test"
    queue = "cdp_important"
    task = SSHOperator(
        dag=dag, task_id='update_or_distinct_table_{}'.format(table_name), ssh_conn_id='test_0802',
        command=SPARK_COMMAND.format(
            queue=queue, base_dir=BASE_DIR, class_name=class_name,
            param=param))
    return task

# Task dependencies
deal_a_table_task('a_dms_handover') >> deal_a_table_task('a_dms_customer') >> deal_a_table_task('a_dms_prospect')
