1、脚本
{
  "job": {
    "setting": {
      "speed": {
        "channel": 3
      }
    },
    "content": [
      {
        "reader": {
          "name": "hdfsreader",
          "parameter": {
            "path": "/user/hive/warehouse/ods.db/t_copy/batch_no=20220221/*",
            "defaultFS": "hdfs://node03:9000",
            "column": ["*"],
            "fileType": "text",
            "encoding": "UTF-8",
            "nullFormat": "",
            "fieldDelimiter": "|"
          }
        },
        "writer": {
          "name": "mysqlwriter",
          "parameter": {
            "writeMode": "insert",
            "username": "root",
            "password": "123456",
            "column": [
              "id",
              "test_bigint",
              "test_float",
              "test_DOUBLE",
              "test_TIMESTAMP",
              "test_date",
              "test_datetime",
              "test_string",
              "test_char",
              "test_text",
              "test_blob",
              "test_decimal"
            ],
            "session": [
              "set session sql_mode='ANSI'"
            ],
            "preSql": [
              "truncate table t_copy"
            ],
            "connection": [
              {
                "jdbcUrl": "jdbc:mysql://node03:3306/test?useUnicode=true&characterEncoding=gbk",
                "table": [
                  "t_copy"
                ]
              }
            ]
          }
        }
      }
    ]
  }
}
2、执行脚本
/opt/module/datax/bin/datax.py mysql_hive_ods_t_copy.json