DataX usage scenarios and configuration files

  • Query

Database-to-database sync (custom SQL)

{
    "job": {
        "setting": {
            "errorLimit":{
                "percentage":0,
                "record":0
            },
            "speed":{
                "channel":5
            }
        },
        "content":[
            {
                "reader":{
                    "name":"mysqlreader",
                    "parameter":{
                        "username": "username",
                        "password": "password",
                        "connection": [
                          {
                            "jdbcUrl": ["jdbc:mysql://ip:port/databse"],
                            "querySql": ["select value1,value2 from tableName where 1=1"]
                          }
                        ]
                    }
                },
                "writer": {
                  "name": "mysqlwriter",
                  "parameter": {
                    "column": [
                                "value1",
                                "value2"
                            ],
                    "encoding": "UTF-8",
                    "connection": [
                      {
                        "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/test",
                        "table":["tableName"]
                      }
                    ],
                    "username": "username",
                    "password": "password",
                    "preSql":[
                        "前置SQL"
                    ],
                    "postSql":[
                        "后置SQL"
                    ]
                  }
                }
            }
        ]
    }
}
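
A job like this is launched with python {DATAX_HOME}/bin/datax.py job.json. When querySql is used for incremental extraction, a common pattern is to put ${...} placeholders in the SQL and supply values on the command line with -p. The fragment below is only a sketch: start_time, end_time and the update_time column are illustrative names, and ${...} substitution via -p depends on your DataX build.

{
    "reader": {
        "name": "mysqlreader",
        "parameter": {
            "username": "username",
            "password": "password",
            "connection": [
                {
                    "jdbcUrl": ["jdbc:mysql://ip:port/database"],
                    "querySql": [
                        "select value1,value2 from tableName where update_time >= '${start_time}' and update_time < '${end_time}'"
                    ]
                }
            ]
        }
    }
}

The values are then passed at run time, for example: python datax.py -p "-Dstart_time=2024-01-01 -Dend_time=2024-01-02" job.json.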

Database-to-database sync (fixed columns)

{
    "core": {
         "transport" : {
              "channel": {
                   "speed": {
                       "record": 1000
                    }
               }
         }
    },
    "job": {
        "setting": {
            "errorLimit":{
                "percentage":0,
                "record":0
            },
            "speed":{
                "channel":2,
                "record":1000
            }
        },
        "content":[
            {
                "reader":{
                    "name":"mysqlreader",
                    "parameter":{
                        "username": "username",
                        "password": "password",
                        "connection": [
                          {
                            "jdbcUrl": ["jdbc:mysql://ip:port/database"],
                            "table": ["table1"]
                          }
                        ],
                        "column": ["*"],
                        "where": "1=1"
                    }
                },
                "writer": {
                  "name": "mysqlwriter",
                  "parameter": {
                    "column": ["*"],
                    "encoding": "UTF-8",
                    "connection": [
                      {
                        "jdbcUrl": "jdbc:mysql://ip:port/database",
                        "table":["table2"]
                      }
                    ],
                    "password": "password",
                    "preSql":[
                        "前置sql"
                    ],
                    "username": "username"
                  }
                }
            }
        ]
    }
}
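
For database-to-database jobs, mysqlwriter also accepts a writeMode parameter that controls how rows are written: insert (plain INSERT INTO, the default), replace (REPLACE INTO) and update (INSERT ... ON DUPLICATE KEY UPDATE). The fragment below is a sketch of the writer from the job above with writeMode added; replace and update are only meaningful when the target table has a primary or unique key.

{
    "writer": {
        "name": "mysqlwriter",
        "parameter": {
            "writeMode": "replace",
            "column": ["*"],
            "encoding": "UTF-8",
            "connection": [
                {
                    "jdbcUrl": "jdbc:mysql://ip:port/database",
                    "table": ["table2"]
                }
            ],
            "username": "username",
            "password": "password"
        }
    }
}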

Database-to-HDFS sync

{
    "job": {
        "setting": {
            "speed":{
                "channel":5
            }
        },
        "content":[
            {
                "reader":{
                    "name":"mysqlreader",
                    "parameter":{
                        "username": "username",
                        "password": "password",
                        "column":[
                            "value1",
                            "REPLACE(REPLACE(value2, CHAR(10), ''), CHAR(13), '')"
                        ],
                        "where": "valida_status ='1'",
                        "splitFactor": 3,
                        "samplePercentage": 1,
                        "connection": [
                          {
                            "table": [
                              "table_name"
                            ],
                            "jdbcUrl": [
                              "jdbc:mysql://ip:port/database"
                            ]
                          }
                        ]
                    }
                },
                "writer": {
                  "name": "hdfswriter",
                  "parameter": {
                    "column": [
                      {
                        "name": "value1",
                        "type": "string"
                      },
                      {
                        "name": "value2",
                        "type": "string"
                      }
                    ],
                    "encoding": "UTF-8",
                    "defaultFS": "hdfs://hdfsunity",
                    "fieldDelimiter": "\u0001",
                    "fileName": "fileName",
                    "fileType": "text",
                    "path": "filePath",
                    "writeMode": "truncate",
                    "hadoopConfig":{
                        "dfs.client.failover.proxy.provider.hdfsunity":"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider",
                        "dfs.ha.namenodes.hdfsunity":"r1,r2",
                        "dfs.namenode.rpc-address.hdfsunity.r1":"",
                        "dfs.namenode.rpc-address.hdfsunity.r2":"",
                        "dfs.nameservices":"hdfsunity",
                        "fs.defaultFS":"hdfs://hdfsunity"
                    },
                    "haveKerberos":true,
                    "kerberosKeytabFilePath":"",
                    "kerberosKrb5FilePath":"",
                    "kerberosPrincipal":"",
                  }
                }
            }
        ]
    }
}
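
The channel setting only caps parallelism; with a single source table in table mode (table + column, as above) the read runs in one channel unless splitPk is configured, typically on an integer primary-key column (splitPk is ignored in querySql mode). A sketch of the reader with splitPk added, where id is a hypothetical primary-key column:

{
    "reader": {
        "name": "mysqlreader",
        "parameter": {
            "username": "username",
            "password": "password",
            "column": [
                "value1",
                "value2"
            ],
            "splitPk": "id",
            "where": "valida_status ='1'",
            "connection": [
                {
                    "table": ["table_name"],
                    "jdbcUrl": ["jdbc:mysql://ip:port/database"]
                }
            ]
        }
    }
}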

HDFS-to-database sync

{
    "job": {
        "setting": {
            "speed":{
                "channel":5
            }
        },
        "content":[
            {
                "reader":{
                    "name":"hdfsreader",
                    "parameter":{
                        "column":[
                            {
                                "index":0,
                                "type":"string"
                            },
                            {
                                "index":1,
                                "type":"string"
                            }
                        ],
                        "defaultFS":"hdfs://hdfsunity",
                        "encoding":"UTF-8",
                        "fieldDelimiter":"\u0001",
                        "fileType":"TEXT",
                        "compress": "hadoop-snappy",
                        "hadoopConfig":{
                            "dfs.client.failover.proxy.provider.hdfsunity":"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider",
                            "dfs.ha.namenodes.hdfsunity":"r1,r2",
                            "dfs.namenode.rpc-address.hdfsunity.r1":"",
                            "dfs.namenode.rpc-address.hdfsunity.r2":"",
                            "dfs.nameservices":"hdfsunity",
                            "fs.defaultFS":"hdfs://hdfsunity"
                        },
                        "haveKerberos":true,
                        "kerberosKeytabFilePath":"",
                        "kerberosKrb5FilePath":"",
                        "kerberosPrincipal":"",
                        "path":"filePath"
                    }
                },
                "writer":{
                    "name":"mysqlwriter",
                    "parameter":{
                        "column":[
                            "value1",
                            "value2"
                        ],
                        "connection":[
                            {
                                "jdbcUrl": "jdbc:mysql://ip:port/database",
                                "table": ["tableName"]
                            }
                        ],
                        "username":"USERNAME",
                        "password":"PASSWORD",
                        "preSql":[
                            "前置SQL"
                        ],
                        "postSql":[
                            "后置SQL"
                        ]
                    }
                }
            }
        ]
    }
}
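
Besides reading fields by index, an hdfsreader column can also carry a constant: when value is given instead of index, every record gets that constant in the corresponding position, which is convenient for tagging rows with, say, a batch date. A sketch of such a column list (the constant 20240101 is purely illustrative, and the writer's column list must then contain a matching third column):

{
    "column": [
        {
            "index": 0,
            "type": "string"
        },
        {
            "index": 1,
            "type": "string"
        },
        {
            "type": "string",
            "value": "20240101"
        }
    ]
}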

Update (executing DML through DataX)

Here mysqlreader's querySql carries an UPDATE statement instead of a query, and streamwriter acts as a dummy writer that simply prints any records that come back, so the job is used only to run the statement; see also the preSql/postSql sketch after the configuration.

{
    "job": {
        "setting": {
            "speed": {
                "channel": 5
            }
        },
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {
                        "username": "username",
                        "password": "password",
                        "connection": [
                            {
                                "querySql": [
                                    "UPDATE ti_tower_modeling_in_init_pro SET status_code = 9 WHERE unicom_fee IS NULL"
                                ],
                                "jdbcUrl": [
                                    "jdbc:mysql://ip:port/database"
                                ]
                            }
                        ]
                    }
                },
                "writer": {
                    "name": "streamwriter",
                    "parameter": {
                        "print": true
                    }
                }
            }
        ]
    }
}
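
When the DML belongs to an ordinary sync job rather than a standalone step, the same statement can instead go into the writer's preSql or postSql, which DataX executes before or after loading the data. A sketch showing only the relevant writer keys (the remaining writer parameters are as in the earlier jobs):

{
    "writer": {
        "name": "mysqlwriter",
        "parameter": {
            "preSql": [
                "UPDATE ti_tower_modeling_in_init_pro SET status_code = 9 WHERE unicom_fee IS NULL"
            ]
        }
    }
}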
