一.配置文件mongo-hdfs.json
{
"job": {
"setting": {
"speed": {
"channel": 2
}
},
"content": [
{
"reader": {
"name": "mongodbreader",
"parameter": {
"address": ["cdh3:27017"],
"userName": "",
"userPassword": "",
"dbName": "db",
"collectionName": "carVoltage",
"query":"{time:{ $gte: ISODate('2016-11-28T11:16:33.303Z'), $lte: ISODate('2017-12-28T11:16:33.303Z') }}",
"column": [
{
"index":0,
"name": "_id",
"type": "string"
}, {
"index":1,
"name": "plateNum",
"type": "string"
}, {
"index":2,
"name": "terminalId",
"type": "string"
}, {
"index":3,
"name": "time",
"type": "date"
}, {
"index":4,
"name": "voltage",
"type": "double"
}
]
}
},
"writer": {
"name": "hdfswriter",
"parameter": {
"defaultFS": "hdfs://10.x.x.x:8020",
"fileType": "text",
"path": "/user/anguoan",
"fileName": "zhenghaoReader",
"column": [
{
"index":0,
"name": "id",
"type": "string"
},
{
"index":1,
"name": "plateNum",
"type": "string"
},
{
"index":2,
"name": "terminalId",
"type": "string"
},
{
"index":3,
"name": "time",
"type": "date"
},
{
"index":4,
"name": "voltage",
"type": "double"
}
],
"writeMode": "append",
"fieldDelimiter": ","
}
}
}
]
}
}
二.执行脚本
datax.py mongo-hdfs.json