DataX实战:Hive数据同步到MySQL

本文介绍了使用DataX实现从Hive到MySQL的数据同步过程,详细配置包括hdfsreader读取Hive表底层HDFS数据(设置字段索引、类型和分隔符),以及mysqlwriter写入数据(包括preSql预清理SQL和insert写入模式)。
摘要由CSDN通过智能技术生成

{

"core": {

"transport": {

"channel": {

"speed": {

"record": 10000,

"byte": 1048576

}

}

}

},

"job": {

"setting": {

"speed": {

"channel": 2,

"byte": 1048576,

"record": 10000

}

},

"content": [{

"reader": {

"name": "hdfsreader",

"parameter": {

"path": "/usr/hive/warehouse/reco_train.db/*****/*",

"defaultFS": "hdfs://HDFS*****/",

"column": [{

"index": 0,

"type": "string"

},

{

"index": 1,

"type": "string"

},

{

"index": 2,

"type": "string"

},

{

"index": 3,

"type": "string"

}

],

"fileType": "text",

"encoding": "UTF-8",

"fieldDelimiter": "\u0001",

"hadoopConfig":{

"dfs.nameservices": "HDFS*****",

"dfs.ha.namenodes.HDFS*****": "nn1,nn2",

"dfs.namenode.rpc-address.HDFS*****.nn1": "*****:4007",

"dfs.namenode.rpc-address.HDFS*****.nn2": "*****:4007",

"dfs.client.failover.proxy.provider.HDFS*****": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"

}

}

},

"writer": {

"name": "mysqlwriter",

"parameter": {

"writeMode": "insert",

"username": "*****",

"password": "*****",

"column": [

"sku",

"one_category",

"two_category",

"three_category"

],

"session": [

"set session sql_mode='ANSI'"

],

"preSql": [

"DELETE FROM *****"

],

"connection": [{

"jdbcUrl": "jdbc:mysql://*****:3306/databasesName?useUnicode=true&characterEncoding=UTF-8",

"table": [

"*****"

]

}]

}

}

}]

}

}

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值