DataX: common JSON job configurations

DataX execution commands

 Run a job from an existing configuration file:
 python /opt/installs/datax/datax.py /opt/installs/hdfs2ftp.json
 Print a configuration template for a given reader/writer pair:
 python /opt/installs/datax/datax.py -r ftpreader -w hdfswriter
 DataX-Web can be opened in a browser at the following address:
 http://hadoop13:9527/index.html
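
The job files below hard-code hosts, paths and dates. When a value has to change between runs, DataX also supports ${variable} placeholders inside the json, filled in at launch time with -p. A minimal sketch, assuming a placeholder named biz_date that you have added to a querySql yourself (the dt column and the date value are purely illustrative):

 python /opt/installs/datax/datax.py -p "-Dbiz_date=2024-01-01" /opt/installs/mysql2hdfs.json

 In the json: "querySql": ["select device_id,profile,blog_url from user_submit where dt = '${biz_date}'"]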

mysql2mysql

{
    "job": {
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {  "encoding": "UTF-8",
                        "column": ["uid","nick_name","achievement","level","job","register_time"],
                        "connection": [
                            {
                                "jdbcUrl": ["jdbc:mysql://hadoop10:3306/sqoop?characterEncoding=UTF-8"],
                                "table": ["user_info"]
                            }
                        ],
                        "password": "123456",
                        "username": "root" 
                    }
                },
                "writer": {
                    "name": "mysqlwriter",
                    "parameter": {  "encoding": "UTF-8",
                        "column": ["uid","nick_name","achievement","level","job","register_time"],
                        "connection": [
                            {
                                "jdbcUrl": "jdbc:mysql://hadoop13:3306/datax?characterEncoding=UTF-8",
                                "table": ["user_info"]
                            }
                        ],
                        "password": "123456",
                        "preSql": ["truncate table user_info"],
                        "session": [],
                        "username": "root",
                        "writeMode": "insert"
                    }
                }
            }
        ],
        "setting": {
            "speed": {
                "channel": "1"
            }
        }
    }
}
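
The reader above copies the whole user_info table through a single channel. mysqlreader also understands a where filter and a splitPk column for splitting the read across channels; a hedged fragment based on the job above (the register_time condition is only an illustration, and splitPk assumes uid is an integer primary key):

"reader": {
    "name": "mysqlreader",
    "parameter": {
        "username": "root",
        "password": "123456",
        "column": ["uid","nick_name","achievement","level","job","register_time"],
        "splitPk": "uid",
        "where": "register_time >= '2024-01-01'",
        "connection": [
            {
                "jdbcUrl": ["jdbc:mysql://hadoop10:3306/sqoop?characterEncoding=UTF-8"],
                "table": ["user_info"]
            }
        ]
    }
}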

mysql2hdfs

{
    "job": {
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {
                        "connection": [
                            {
                                "jdbcUrl": ["jdbc:mysql://hadoop10:3306/sqoop?characterEncoding=UTF-8"],
                                "querySql": [
                                    " select device_id,profile,blog_url from user_submit;"
                                ]
                            }
                        ],
                        "password": "123456",
                        "username": "root",
                    }
                },
                "writer": {
                    "name": "hdfswriter",
                    "parameter": {
					"encoding": "UTF-8",
                        "column": [{
                         		"name": "d1",
                         		"type": "STRING"
                     		},
                     		{
                        		"name": "p1",
                         		"type": "STRING"
                     		} ,
                     		{
                        		"name": "b1",
                         		"type": "STRING"
                     		} 
							],
                        "compress": "",
                        "defaultFS": "hdfs://hadoop10:9000",
                        "fieldDelimiter": "\t",
                        "fileName": "user_submit",
                        "fileType": "text",
                        "path": "/in/datax/",
                        "writeMode": "append"
                    }
                }
            }
        ],
        "setting": {
            "speed": {
                "channel": "2"
            }
        }
    }
}
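
The writer's column names (d1, p1, b1) only need to match the querySql columns in number and order; for a plain text target the names themselves are never written into the output. hdfswriter appends a random suffix to fileName for each writing thread, so the result in /in/datax/ is one or more files beginning with user_submit. A quick sanity check with the ordinary HDFS shell (paths taken from the job above):

 hdfs dfs -ls /in/datax/
 hdfs dfs -cat /in/datax/user_submit* | head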

mysql2HaHdfs

{
    "job": {
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {
                        "connection": [
                            {
                                "jdbcUrl": ["jdbc:mysql://hadoop10:3306/sqoop?characterEncoding=UTF-8"],
                                "querySql": [
                                    "select device_id,profile,blog_url from user_submit;"
                                ]
                            }
                        ],
                        "password": "123456",
                        "username": "root",
                    }
                },
                      "writer": {
                    "name": "hdfswriter",
                    "parameter": {
                       "column": [{
                         		"name": "d1",
                         		"type": "STRING"
                     		},
                     		{
                        		"name": "p1",
                         		"type": "STRING"
                     		} ,
                     		{
                        		"name": "b1",
                         		"type": "STRING"
                     		} 
							],
                        "compress": "",
                        "defaultFS": "hdfs://hdfs-cluster",
						"hadoopConfig":{
								"dfs.nameservices": "hdfs-cluster",
								"dfs.ha.namenodes.hdfs-cluster": "nn1,nn2",
								"dfs.namenode.rpc-address.hdfs-cluster.nn1": "hadoop11:9000",
								"dfs.namenode.rpc-address.hdfs-cluster.nn2": "hadoop12:9000",
								"dfs.client.failover.proxy.provider.hdfs-cluster": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
						},
                        "fieldDelimiter": "\t",
                        "fileName": "mysql2HA",
                        "fileType": "text",
                        "path": "/input/datax",
                        "writeMode": "append"
                    }
                }
            }
        ],
        "setting": {
            "speed": {
                "channel": "3"
            }
        }
    }
}
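
The only difference from the previous job is the target: defaultFS points at the nameservice hdfs-cluster instead of a single NameNode, and hadoopConfig repeats the HA entries you would normally find in hdfs-site.xml (the nameservice, its two NameNodes, their RPC addresses and the failover proxy provider), so the writer can fail over between nn1 and nn2. To check the output from a node whose HDFS client already knows this nameservice (a sketch; use a plain path if your client's defaultFS is already hdfs-cluster):

 hdfs dfs -ls hdfs://hdfs-cluster/input/datax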

mysql2ftp

{
    "job": {
        "setting": {
            "speed": {
                "channel": 2
            }
        },
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {
                        "connection": [
                            {
                                "jdbcUrl": ["jdbc:mysql://hadoop10:3306/sqoop"],
                                "querySql": [
                                    "select device_id,profile,blog_url from user_submit;"
                                ]
                            }
                        ],
                        "password": "123456",
                        "username": "root",
                    }
                },
                "writer": {
                    "name": "ftpwriter",
                    "parameter": {
                        "protocol": "sftp",
                        "host": "hadoop10",
                        "port": 22,
                        "username": "root",
                        "password": "123456",
                        "timeout": "60000",
                        "connectPattern": "PASV",
                        "path": "/opt/data/datax",
                        "fileName": "sqoop_mysql2ftp",
                        "writeMode": "truncate",
                        "fieldDelimiter": "\t",
                        "encoding": "UTF-8",
                        "nullFormat": "\\N",
                        "dateFormat": "yyyy-MM-dd",
                        "fileFormat": "csv",
			            "suffix": ".csv",
                        "header": []
                    }
                }
            }
        ]
    }
}
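
With protocol sftp the transfer rides on SSH, which is why port 22 and ordinary Linux credentials are used; as far as the ftpwriter documentation goes, connectPattern (PASV/PORT) is only consulted for the plain ftp protocol. Like hdfswriter, ftpwriter adds a per-thread suffix to fileName, and the configured suffix .csv is appended after that, so the output on hadoop10 can be checked with something like:

 ls /opt/data/datax/sqoop_mysql2ftp*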

hdfs2mysql

{
    "job": {
        "setting": {
            "speed": {
                "channel": 2
            }
        },
        "content": [
            {
                "reader": {
                    "name": "hdfsreader",
                    "parameter": {
                        "path": "/in/datax/user_submit*",
                        "defaultFS": "hdfs://hadoop10:9000",
                        "column": [
                               {
                                "index": 0,
                                "type": "string"
                               },
                               {
                                "index": 1,
                                "type": "string"
                               },
                               {
                                "index": 2,
                                "type": "string"
                               }
                        ],
                        "fileType": "text",
                        "encoding": "UTF-8",
                        "fieldDelimiter": "\t"
                    }

                },
                "writer": {
                    "name": "mysqlwriter",
                    "parameter": {
                        "column": ["device_id","profile","blog_url"],
                        "connection": [
                            {
                                "jdbcUrl": "jdbc:mysql://hadoop10:3306/sqoop?characterEncoding=UTF-8",
                                "table": ["user_submit"]
                            }
                        ],
                        "password": "123456",
                        "preSql": ["truncate table user_submit"],
                        "session": [],
                        "username": "root",
                        "writeMode": "insert"
                    }
                }
            }
        ]
    }
}
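
This reader talks to a single NameNode. To read the files written by the mysql2HaHdfs job instead, hdfsreader accepts the same hadoopConfig block as hdfswriter; a hedged fragment reusing the values from that job ("column": ["*"] reads every delimited field as a string):

"reader": {
    "name": "hdfsreader",
    "parameter": {
        "path": "/input/datax/*",
        "defaultFS": "hdfs://hdfs-cluster",
        "hadoopConfig": {
            "dfs.nameservices": "hdfs-cluster",
            "dfs.ha.namenodes.hdfs-cluster": "nn1,nn2",
            "dfs.namenode.rpc-address.hdfs-cluster.nn1": "hadoop11:9000",
            "dfs.namenode.rpc-address.hdfs-cluster.nn2": "hadoop12:9000",
            "dfs.client.failover.proxy.provider.hdfs-cluster": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
        },
        "fileType": "text",
        "encoding": "UTF-8",
        "fieldDelimiter": "\t",
        "column": ["*"]
    }
}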

ftp2hdfs

{
    "job": {
        "setting": {
            "speed": {
                "channel": 2
            }
        },
        "content": [
            {
                "reader": {
                    "name": "ftpreader",
                    "parameter": {
                        "protocol": "sftp",
                        "host": "hadoop10",
                        "port": 22,
                        "username": "root",
                        "password": "123456",
                        "path": [
                            "/opt/data/dept.txt"
                        ],
                        "column": [
                            {
                                "index": 0,
                                "type": "string"
                            },
                            {
                                "index": 1,
                                "type": "string"
                            } 
                        ],
                        "encoding": "UTF-8",
                        "fieldDelimiter": ","
                    }
                },
                "writer": {
                    "name": "hdfswriter",
                    "parameter": {
                        "column": [{
                         		"name": "user_id",
                         		"type": "STRING"
                     		},
                     		{
                        		"name": "user_name",
                         		"type": "STRING"
                     		} ],
                        "compress": "",
                        "defaultFS": "hdfs://hdfs-cluster",
						"hadoopConfig":{
								"dfs.nameservices": "hdfs-cluster",
								"dfs.ha.namenodes.hdfs-cluster": "nn1,nn2",
								"dfs.namenode.rpc-address.hdfs-cluster.nn1": "hadoop11:9000",
								"dfs.namenode.rpc-address.hdfs-cluster.nn2": "hadoop12:9000",
								"dfs.client.failover.proxy.provider.hdfs-cluster": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
						},
                        "fieldDelimiter": "\t",
                        "fileName": "ftp_test",
                        "fileType": "text",
                        "path": "/qwer",
                        "writeMode": "append"
                    }
                }
            }
        ]
    }
}
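
One practical note on the writer side: in my tests hdfswriter does not create the target directory for you, so it is safer to create /qwer on the HA cluster before the first run (standard HDFS shell, path taken from the job above):

 hdfs dfs -mkdir -p /qwer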