Datax同步GBase8a数据(大表)到MySQL测试
1、生成测试脚本语句
#!/bin/bash
# Generate one bulk-INSERT SQL statement with random test data for table
# t_dx_mysql150 (150 data columns) and write it to $filename.
#
# The table's column types repeat in groups of five:
#   VARCHAR, INT, VARCHAR, DECIMAL, DATE
# so every row is the value group ('<rand>', <i>, '<rand>', <i>, '<date>')
# repeated 30 times (30 * 5 = 150 values).
set -eu

readonly firstnum=1
readonly endnum=1000
readonly filename='/home/gbase/randomValues150.sql'

date_t=$(date +%F)   # today's date, YYYY-MM-DD, shared by every row
readonly date_t

# Emit one 8-character random alphanumeric string on stdout.
# Reading a fixed 64 bytes from /dev/urandom (instead of an unbounded
# `cat /dev/urandom | tr | fold | head` pipeline) avoids both the useless
# cat and the SIGPIPE failure such pipelines cause under `set -o pipefail`.
gen_rand_string() {
  head -c 64 /dev/urandom | LC_ALL=C tr -dc 'a-zA-Z0-9' | head -c 8
}

# Print one parenthesized VALUES tuple for row number $1 with random string $2.
build_row() {
  local i=$1 rs=$2
  local group="'$rs', $i, '$rs', $i, '$date_t'"
  local fields="" k
  for ((k = 0; k < 30; k++)); do
    fields+="${fields:+, }$group"
  done
  printf '(%s)' "$fields"
}

main() {
  local cols row i
  # "field1, field2, ..., field150" — generated, so the spacing is uniform
  # (the hand-written list mixed "field141, field142" and "field141,field142").
  cols=$(seq -s ', ' -f 'field%g' 1 150)
  printf 'INSERT INTO t_dx_mysql150 (%s) VALUES \n' "$cols" > "$filename"
  {
    for ((i = firstnum; i <= endnum; i++)); do
      row=$(build_row "$i" "$(gen_rand_string)")
      # Every tuple ends with "," except the last, which terminates the statement.
      if ((i != endnum)); then
        printf '%s,\n' "$row"
      else
        printf '%s;\n' "$row"
      fi
    done
  } >> "$filename"
}

main
2、相关表结构
GBase8a相关表结构
-- GBase 8a source table for the DataX large-table sync test.
-- 150 data columns whose types repeat in groups of five:
--   VARCHAR(50), INT, VARCHAR(100), DECIMAL(10, 2), DATE
-- NOTE(review): field143 and field148 are VARCHAR(50), breaking the
-- VARCHAR(100) slot of the pattern -- confirm this deviation is intentional.
CREATE TABLE t_dx_mysql150 (
-- auto-increment surrogate key; also used by the sync jobs' "where" filters
id int NOT NULL AUTO_INCREMENT,
field1 VARCHAR(50),
field2 INT,
field3 VARCHAR(100),
field4 DECIMAL(10, 2),
field5 DATE,
field6 VARCHAR(50),
field7 INT,
field8 VARCHAR(100),
field9 DECIMAL(10, 2),
field10 DATE,
field11 VARCHAR(50),
field12 INT,
field13 VARCHAR(100),
field14 DECIMAL(10, 2),
field15 DATE,
field16 VARCHAR(50),
field17 INT,
field18 VARCHAR(100),
field19 DECIMAL(10, 2),
field20 DATE,
field21 VARCHAR(50),
field22 INT,
field23 VARCHAR(100),
field24 DECIMAL(10, 2),
field25 DATE,
field26 VARCHAR(50),
field27 INT,
field28 VARCHAR(100),
field29 DECIMAL(10, 2),
field30 DATE,
field31 VARCHAR(50),
field32 INT,
field33 VARCHAR(100),
field34 DECIMAL(10, 2),
field35 DATE,
field36 VARCHAR(50),
field37 INT,
field38 VARCHAR(100),
field39 DECIMAL(10, 2),
field40 DATE,
field41 VARCHAR(50),
field42 INT,
field43 VARCHAR(100),
field44 DECIMAL(10, 2),
field45 DATE,
field46 VARCHAR(50),
field47 INT,
field48 VARCHAR(100),
field49 DECIMAL(10, 2),
field50 DATE,
field51 VARCHAR(50),
field52 INT,
field53 VARCHAR(100),
field54 DECIMAL(10, 2),
field55 DATE,
field56 VARCHAR(50),
field57 INT,
field58 VARCHAR(100),
field59 DECIMAL(10, 2),
field60 DATE,
field61 VARCHAR(50),
field62 INT,
field63 VARCHAR(100),
field64 DECIMAL(10, 2),
field65 DATE,
field66 VARCHAR(50),
field67 INT,
field68 VARCHAR(100),
field69 DECIMAL(10, 2),
field70 DATE,
field71 VARCHAR(50),
field72 INT,
field73 VARCHAR(100),
field74 DECIMAL(10, 2),
field75 DATE,
field76 VARCHAR(50),
field77 INT,
field78 VARCHAR(100),
field79 DECIMAL(10, 2),
field80 DATE,
field81 VARCHAR(50),
field82 INT,
field83 VARCHAR(100),
field84 DECIMAL(10, 2),
field85 DATE,
field86 VARCHAR(50),
field87 INT,
field88 VARCHAR(100),
field89 DECIMAL(10, 2),
field90 DATE,
field91 VARCHAR(50),
field92 INT,
field93 VARCHAR(100),
field94 DECIMAL(10, 2),
field95 DATE,
field96 VARCHAR(50),
field97 INT,
field98 VARCHAR(100),
field99 DECIMAL(10, 2),
field100 DATE,
field101 VARCHAR(50),
field102 INT,
field103 VARCHAR(100),
field104 DECIMAL(10, 2),
field105 DATE,
field106 VARCHAR(50),
field107 INT,
field108 VARCHAR(100),
field109 DECIMAL(10, 2),
field110 DATE,
field111 VARCHAR(50),
field112 INT,
field113 VARCHAR(100),
field114 DECIMAL(10, 2),
field115 DATE,
field116 VARCHAR(50),
field117 INT,
field118 VARCHAR(100),
field119 DECIMAL(10, 2),
field120 DATE,
field121 VARCHAR(50),
field122 INT,
field123 VARCHAR(100),
field124 DECIMAL(10, 2),
field125 DATE,
field126 VARCHAR(50),
field127 INT,
field128 VARCHAR(100),
field129 DECIMAL(10, 2),
field130 DATE,
field131 VARCHAR(50),
field132 INT,
field133 VARCHAR(100),
field134 DECIMAL(10, 2),
field135 DATE,
field136 VARCHAR(50),
field137 INT,
field138 VARCHAR(100),
field139 DECIMAL(10, 2),
field140 DATE,
field141 VARCHAR(50),
field142 INT,
-- NOTE(review): deviates from the pattern (VARCHAR(100) expected)
field143 VARCHAR(50),
field144 DECIMAL(10, 2),
field145 DATE,
field146 VARCHAR(50),
field147 INT,
-- NOTE(review): deviates from the pattern (VARCHAR(100) expected)
field148 VARCHAR(50),
field149 DECIMAL(10, 2),
field150 DATE,
primary key(id)
);
MySQL相关表结构
-- MySQL target table: identical 150-column layout to the GBase 8a source
-- (types repeat: VARCHAR(50), INT, VARCHAR(100), DECIMAL(10, 2), DATE;
-- field143/field148 carry the source's VARCHAR(50) deviation).
-- NOTE(review): id has no AUTO_INCREMENT and no PRIMARY KEY here; DataX
-- writes id values explicitly, but without a key nothing prevents duplicate
-- ids if a job is re-run -- consider adding PRIMARY KEY(id).
CREATE TABLE t_dx_mysql150 (
id int NOT NULL,
field1 VARCHAR(50),
field2 INT,
field3 VARCHAR(100),
field4 DECIMAL(10, 2),
field5 DATE,
field6 VARCHAR(50),
field7 INT,
field8 VARCHAR(100),
field9 DECIMAL(10, 2),
field10 DATE,
field11 VARCHAR(50),
field12 INT,
field13 VARCHAR(100),
field14 DECIMAL(10, 2),
field15 DATE,
field16 VARCHAR(50),
field17 INT,
field18 VARCHAR(100),
field19 DECIMAL(10, 2),
field20 DATE,
field21 VARCHAR(50),
field22 INT,
field23 VARCHAR(100),
field24 DECIMAL(10, 2),
field25 DATE,
field26 VARCHAR(50),
field27 INT,
field28 VARCHAR(100),
field29 DECIMAL(10, 2),
field30 DATE,
field31 VARCHAR(50),
field32 INT,
field33 VARCHAR(100),
field34 DECIMAL(10, 2),
field35 DATE,
field36 VARCHAR(50),
field37 INT,
field38 VARCHAR(100),
field39 DECIMAL(10, 2),
field40 DATE,
field41 VARCHAR(50),
field42 INT,
field43 VARCHAR(100),
field44 DECIMAL(10, 2),
field45 DATE,
field46 VARCHAR(50),
field47 INT,
field48 VARCHAR(100),
field49 DECIMAL(10, 2),
field50 DATE,
field51 VARCHAR(50),
field52 INT,
field53 VARCHAR(100),
field54 DECIMAL(10, 2),
field55 DATE,
field56 VARCHAR(50),
field57 INT,
field58 VARCHAR(100),
field59 DECIMAL(10, 2),
field60 DATE,
field61 VARCHAR(50),
field62 INT,
field63 VARCHAR(100),
field64 DECIMAL(10, 2),
field65 DATE,
field66 VARCHAR(50),
field67 INT,
field68 VARCHAR(100),
field69 DECIMAL(10, 2),
field70 DATE,
field71 VARCHAR(50),
field72 INT,
field73 VARCHAR(100),
field74 DECIMAL(10, 2),
field75 DATE,
field76 VARCHAR(50),
field77 INT,
field78 VARCHAR(100),
field79 DECIMAL(10, 2),
field80 DATE,
field81 VARCHAR(50),
field82 INT,
field83 VARCHAR(100),
field84 DECIMAL(10, 2),
field85 DATE,
field86 VARCHAR(50),
field87 INT,
field88 VARCHAR(100),
field89 DECIMAL(10, 2),
field90 DATE,
field91 VARCHAR(50),
field92 INT,
field93 VARCHAR(100),
field94 DECIMAL(10, 2),
field95 DATE,
field96 VARCHAR(50),
field97 INT,
field98 VARCHAR(100),
field99 DECIMAL(10, 2),
field100 DATE,
field101 VARCHAR(50),
field102 INT,
field103 VARCHAR(100),
field104 DECIMAL(10, 2),
field105 DATE,
field106 VARCHAR(50),
field107 INT,
field108 VARCHAR(100),
field109 DECIMAL(10, 2),
field110 DATE,
field111 VARCHAR(50),
field112 INT,
field113 VARCHAR(100),
field114 DECIMAL(10, 2),
field115 DATE,
field116 VARCHAR(50),
field117 INT,
field118 VARCHAR(100),
field119 DECIMAL(10, 2),
field120 DATE,
field121 VARCHAR(50),
field122 INT,
field123 VARCHAR(100),
field124 DECIMAL(10, 2),
field125 DATE,
field126 VARCHAR(50),
field127 INT,
field128 VARCHAR(100),
field129 DECIMAL(10, 2),
field130 DATE,
field131 VARCHAR(50),
field132 INT,
field133 VARCHAR(100),
field134 DECIMAL(10, 2),
field135 DATE,
field136 VARCHAR(50),
field137 INT,
field138 VARCHAR(100),
field139 DECIMAL(10, 2),
field140 DATE,
field141 VARCHAR(50),
field142 INT,
field143 VARCHAR(50),
field144 DECIMAL(10, 2),
field145 DATE,
field146 VARCHAR(50),
field147 INT,
field148 VARCHAR(50),
field149 DECIMAL(10, 2),
field150 DATE
);
3、GBase8a数据翻倍
多次执行SQL语句,让数据翻倍
-- Double the GBase 8a row count: re-insert every existing row into the same
-- table. The id column is omitted so AUTO_INCREMENT assigns fresh ids.
-- Each execution doubles the table, so run log2(target/1000) times.
INSERT INTO t_dx_mysql150 (field1, field2, field3, field4, field5, field6, field7, field8, field9, field10, field11, field12, field13, field14, field15, field16, field17, field18, field19, field20, field21, field22, field23, field24, field25, field26, field27, field28, field29, field30, field31, field32, field33, field34, field35, field36, field37, field38, field39, field40, field41, field42, field43, field44, field45, field46, field47, field48, field49, field50, field51, field52, field53, field54, field55, field56, field57, field58, field59, field60, field61, field62, field63, field64, field65, field66, field67, field68, field69, field70, field71, field72, field73, field74, field75, field76, field77, field78, field79, field80, field81, field82, field83, field84, field85, field86, field87, field88, field89, field90, field91, field92, field93, field94, field95, field96, field97, field98, field99, field100, field101, field102, field103, field104, field105, field106, field107, field108, field109, field110, field111, field112, field113, field114, field115, field116, field117, field118, field119, field120, field121, field122, field123, field124, field125, field126, field127, field128, field129, field130, field131, field132, field133, field134, field135, field136, field137, field138, field139, field140, field141,field142,field143,field144,field145,field146,field147,field148,field149,field150)
select field1, field2, field3, field4, field5, field6, field7, field8, field9, field10, field11, field12, field13, field14, field15, field16, field17, field18, field19, field20, field21, field22, field23, field24, field25, field26, field27, field28, field29, field30, field31, field32, field33, field34, field35, field36, field37, field38, field39, field40, field41, field42, field43, field44, field45, field46, field47, field48, field49, field50, field51, field52, field53, field54, field55, field56, field57, field58, field59, field60, field61, field62, field63, field64, field65, field66, field67, field68, field69, field70, field71, field72, field73, field74, field75, field76, field77, field78, field79, field80, field81, field82, field83, field84, field85, field86, field87, field88, field89, field90, field91, field92, field93, field94, field95, field96, field97, field98, field99, field100, field101, field102, field103, field104, field105, field106, field107, field108, field109, field110, field111, field112, field113, field114, field115, field116, field117, field118, field119, field120, field121, field122, field123, field124, field125, field126, field127, field128, field129, field130, field131, field132, field133, field134, field135, field136, field137, field138, field139, field140, field141,field142,field143,field144,field145,field146,field147,field148,field149,field150
from t_dx_mysql150;
4、配置datax工具
如何让 DataX 的 rdbmsreader 插件支持读取 GBase8a 数据库
(1) 把对应驱动上传至${DATAX_HOME}/plugin/reader/rdbmsreader/libs目录下
(2) 修改${DATAX_HOME}/plugin/reader/rdbmsreader/plugin.json配置文件
[root@node3 rdbmsreader]# cat plugin.json
{
  "name": "rdbmsreader",
  "class": "com.alibaba.datax.plugin.reader.rdbmsreader.RdbmsReader",
  "description": "useScene: prod. mechanism: Jdbc connection using the database, execute select sql, retrieve data from the ResultSet. warn: The more you know about the database, the less problems you encounter.",
  "developer": "alibaba",
  "drivers": [
    "com.sybase.jdbc3.jdbc.SybDriver",
    "com.edb.Driver",
    "com.ibm.db2.jcc.DB2Driver",
    "com.gbase.jdbc.Driver"
  ]
}
5、同步任务的配置文件
配置文件一:通过 where 条件将同步数据量限制在 150 万(1,500,000)行以内
cat gb2mysql150.json
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "rdbmsreader",
          "parameter": {
            "column": ["id","field1", "field2", "field3", "field4", "field5", "field6", "field7", "field8", "field9", "field10", "field11", "field12", "field13", "field14", "field15", "field16", "field17", "field18", "field19", "field20", "field21", "field22", "field23", "field24", "field25", "field26", "field27", "field28", "field29", "field30", "field31", "field32", "field33", "field34", "field35", "field36", "field37", "field38", "field39", "field40", "field41", "field42", "field43", "field44", "field45", "field46", "field47", "field48", "field49", "field50", "field51", "field52", "field53", "field54", "field55", "field56", "field57", "field58", "field59", "field60", "field61", "field62", "field63", "field64", "field65", "field66", "field67", "field68", "field69", "field70", "field71", "field72", "field73", "field74", "field75", "field76", "field77", "field78", "field79", "field80", "field81", "field82", "field83", "field84", "field85", "field86", "field87", "field88", "field89", "field90", "field91", "field92", "field93", "field94", "field95", "field96", "field97", "field98", "field99", "field100", "field101", "field102", "field103", "field104", "field105", "field106", "field107", "field108", "field109", "field110", "field111", "field112", "field113", "field114", "field115", "field116", "field117", "field118", "field119", "field120", "field121", "field122", "field123", "field124", "field125", "field126", "field127", "field128", "field129", "field130", "field131", "field132", "field133", "field134", "field135", "field136", "field137", "field138", "field139", "field140", "field141", "field142", "field143", "field144", "field145", "field146", "field147", "field148", "field149", "field150"],
            "connection": [
              {
                "jdbcUrl": ["jdbc:gbase://192.168.122.21:5258/test"],
                "table": ["t_dx_mysql150"]
              }
            ],
            "password": "gbase20110531",
            "username": "gbase",
            "where": "id < 1500000"
          }
        },
        "writer": {
          "name": "mysqlwriter",
          "parameter": {
            "column": ["id","field1", "field2", "field3", "field4", "field5", "field6", "field7", "field8", "field9", "field10", "field11", "field12", "field13", "field14", "field15", "field16", "field17", "field18", "field19", "field20", "field21", "field22", "field23", "field24", "field25", "field26", "field27", "field28", "field29", "field30", "field31", "field32", "field33", "field34", "field35", "field36", "field37", "field38", "field39", "field40", "field41", "field42", "field43", "field44", "field45", "field46", "field47", "field48", "field49", "field50", "field51", "field52", "field53", "field54", "field55", "field56", "field57", "field58", "field59", "field60", "field61", "field62", "field63", "field64", "field65", "field66", "field67", "field68", "field69", "field70", "field71", "field72", "field73", "field74", "field75", "field76", "field77", "field78", "field79", "field80", "field81", "field82", "field83", "field84", "field85", "field86", "field87", "field88", "field89", "field90", "field91", "field92", "field93", "field94", "field95", "field96", "field97", "field98", "field99", "field100", "field101", "field102", "field103", "field104", "field105", "field106", "field107", "field108", "field109", "field110", "field111", "field112", "field113", "field114", "field115", "field116", "field117", "field118", "field119", "field120", "field121", "field122", "field123", "field124", "field125", "field126", "field127", "field128", "field129", "field130", "field131", "field132", "field133", "field134", "field135", "field136", "field137", "field138", "field139", "field140", "field141", "field142", "field143", "field144", "field145", "field146", "field147", "field148", "field149", "field150"],
            "connection": [
              {
                "jdbcUrl": "jdbc:mysql://192.168.122.28:3306/test?rewriteBatchedStatements=true&characterEncoding=utf8&useSSL=false",
                "table": ["t_dx_mysql150"]
              }
            ],
            "password": "password",
            "preSql": [],
            "session": [],
            "username": "root",
            "writeMode": "insert",
            "batchSize": 20480
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": "3"
      }
    }
  }
}
配置文件二:通过 where 条件将同步数据量限制在 100 万(1,000,000)行以内
cat gb2mysql100.json
{
  "job": {
    "setting": {
      "speed": {
        "channel": "3"
      }
    },
    "content": [
      {
        "reader": {
          "name": "rdbmsreader",
          "parameter": {
            "username": "gbase",
            "password": "gbase20110531",
            "where": "id < 1000000",
            "connection": [
              {
                "table": ["t_dx_mysql150"],
                "jdbcUrl": ["jdbc:gbase://192.168.122.21:5258/test"]
              }
            ],
            "column": ["id","field1", "field2", "field3", "field4", "field5", "field6", "field7", "field8", "field9", "field10", "field11", "field12", "field13", "field14", "field15", "field16", "field17", "field18", "field19", "field20", "field21", "field22", "field23", "field24", "field25", "field26", "field27", "field28", "field29", "field30", "field31", "field32", "field33", "field34", "field35", "field36", "field37", "field38", "field39", "field40", "field41", "field42", "field43", "field44", "field45", "field46", "field47", "field48", "field49", "field50", "field51", "field52", "field53", "field54", "field55", "field56", "field57", "field58", "field59", "field60", "field61", "field62", "field63", "field64", "field65", "field66", "field67", "field68", "field69", "field70", "field71", "field72", "field73", "field74", "field75", "field76", "field77", "field78", "field79", "field80", "field81", "field82", "field83", "field84", "field85", "field86", "field87", "field88", "field89", "field90", "field91", "field92", "field93", "field94", "field95", "field96", "field97", "field98", "field99", "field100", "field101", "field102", "field103", "field104", "field105", "field106", "field107", "field108", "field109", "field110", "field111", "field112", "field113", "field114", "field115", "field116", "field117", "field118", "field119", "field120", "field121", "field122", "field123", "field124", "field125", "field126", "field127", "field128", "field129", "field130", "field131", "field132", "field133", "field134", "field135", "field136", "field137", "field138", "field139", "field140", "field141", "field142", "field143", "field144", "field145", "field146", "field147", "field148", "field149", "field150"]
          }
        },
        "writer": {
          "name": "mysqlwriter",
          "parameter": {
            "username": "root",
            "password": "password",
            "writeMode": "insert",
            "batchSize": 20480,
            "preSql": [],
            "session": [],
            "connection": [
              {
                "table": ["t_dx_mysql150"],
                "jdbcUrl": "jdbc:mysql://192.168.122.28:3306/test?rewriteBatchedStatements=true&characterEncoding=utf8&useSSL=false"
              }
            ],
            "column": ["id","field1", "field2", "field3", "field4", "field5", "field6", "field7", "field8", "field9", "field10", "field11", "field12", "field13", "field14", "field15", "field16", "field17", "field18", "field19", "field20", "field21", "field22", "field23", "field24", "field25", "field26", "field27", "field28", "field29", "field30", "field31", "field32", "field33", "field34", "field35", "field36", "field37", "field38", "field39", "field40", "field41", "field42", "field43", "field44", "field45", "field46", "field47", "field48", "field49", "field50", "field51", "field52", "field53", "field54", "field55", "field56", "field57", "field58", "field59", "field60", "field61", "field62", "field63", "field64", "field65", "field66", "field67", "field68", "field69", "field70", "field71", "field72", "field73", "field74", "field75", "field76", "field77", "field78", "field79", "field80", "field81", "field82", "field83", "field84", "field85", "field86", "field87", "field88", "field89", "field90", "field91", "field92", "field93", "field94", "field95", "field96", "field97", "field98", "field99", "field100", "field101", "field102", "field103", "field104", "field105", "field106", "field107", "field108", "field109", "field110", "field111", "field112", "field113", "field114", "field115", "field116", "field117", "field118", "field119", "field120", "field121", "field122", "field123", "field124", "field125", "field126", "field127", "field128", "field129", "field130", "field131", "field132", "field133", "field134", "field135", "field136", "field137", "field138", "field139", "field140", "field141", "field142", "field143", "field144", "field145", "field146", "field147", "field148", "field149", "field150"]
          }
        }
      }
    ]
  }
}
6、测试结果
(1) 同步 100 万以内行数据(注:下方日志中实际生效的 batchSize 为 2048、channel 为 2,与上文 gb2mysql100.json 中列出的 20480、3 不一致,结果以日志中的实际配置为准)
[root@keep-hadoop bin]# python datax.py gb2mysql100.json
DataX (DATAX-OPENSOURCE-3.0), From Alibaba !
Copyright (C) 2010-2017, Alibaba Group. All Rights Reserved.
2024-04-08 14:46:54.200 [main] INFO MessageSource - JVM TimeZone: GMT+08:00, Locale: zh_CN
2024-04-08 14:46:54.204 [main] INFO MessageSource - use Locale: zh_CN timeZone: sun.util.calendar.ZoneInfo[id="GMT+08:00",offset=28800000,dstSavings=0,useDaylight=false,transitions=0,lastRule=null]
2024-04-08 14:46:54.219 [main] INFO VMInfo - VMInfo# operatingSystem class => sun.management.OperatingSystemImpl
2024-04-08 14:46:54.227 [main] INFO Engine - the machine info =>
osInfo: Linux amd64 3.10.0-1160.el7.x86_64
jvmInfo: Oracle Corporation 1.8 25.144-b01
cpu num: 4
totalPhysicalMemory: -0.00G
freePhysicalMemory: -0.00G
maxFileDescriptorCount: -1
currentOpenFileDescriptorCount: -1
GC Names [PS MarkSweep, PS Scavenge]
MEMORY_NAME | allocation_size | init_size
PS Eden Space | 1,536.50MB | 512.50MB
Code Cache | 240.00MB | 2.44MB
Compressed Class Space | 1,024.00MB | 0.00MB
PS Survivor Space | 85.00MB | 85.00MB
PS Old Gen | 3,413.50MB | 1,365.50MB
Metaspace | -0.00MB | 0.00MB
2024-04-08 14:46:54.261 [main] INFO Engine -
{
"content":[
{
"reader":{
"name":"rdbmsreader",
"parameter":{
"column":[
"id",
"field1",
"field2",
"field3",
"field4",
"field5",
"field6",
"field7",
"field8",
"field9",
"field10",
"field11",
"field12",
"field13",
"field14",
"field15",
"field16",
"field17",
"field18",
"field19",
"field20",
"field21",
"field22",
"field23",
"field24",
"field25",
"field26",
"field27",
"field28",
"field29",
"field30",
"field31",
"field32",
"field33",
"field34",
"field35",
"field36",
"field37",
"field38",
"field39",
"field40",
"field41",
"field42",
"field43",
"field44",
"field45",
"field46",
"field47",
"field48",
"field49",
"field50",
"field51",
"field52",
"field53",
"field54",
"field55",
"field56",
"field57",
"field58",
"field59",
"field60",
"field61",
"field62",
"field63",
"field64",
"field65",
"field66",
"field67",
"field68",
"field69",
"field70",
"field71",
"field72",
"field73",
"field74",
"field75",
"field76",
"field77",
"field78",
"field79",
"field80",
"field81",
"field82",
"field83",
"field84",
"field85",
"field86",
"field87",
"field88",
"field89",
"field90",
"field91",
"field92",
"field93",
"field94",
"field95",
"field96",
"field97",
"field98",
"field99",
"field100",
"field101",
"field102",
"field103",
"field104",
"field105",
"field106",
"field107",
"field108",
"field109",
"field110",
"field111",
"field112",
"field113",
"field114",
"field115",
"field116",
"field117",
"field118",
"field119",
"field120",
"field121",
"field122",
"field123",
"field124",
"field125",
"field126",
"field127",
"field128",
"field129",
"field130",
"field131",
"field132",
"field133",
"field134",
"field135",
"field136",
"field137",
"field138",
"field139",
"field140",
"field141",
"field142",
"field143",
"field144",
"field145",
"field146",
"field147",
"field148",
"field149",
"field150"
],
"connection":[
{
"jdbcUrl":[
"jdbc:gbase://192.168.122.21:5258/test"
],
"table":[
"t_dx_mysql150"
]
}
],
"password":"*************",
"username":"gbase",
"where":"id < 1000000"
}
},
"writer":{
"name":"mysqlwriter",
"parameter":{
"column":[
"id",
"field1",
"field2",
"field3",
"field4",
"field5",
"field6",
"field7",
"field8",
"field9",
"field10",
"field11",
"field12",
"field13",
"field14",
"field15",
"field16",
"field17",
"field18",
"field19",
"field20",
"field21",
"field22",
"field23",
"field24",
"field25",
"field26",
"field27",
"field28",
"field29",
"field30",
"field31",
"field32",
"field33",
"field34",
"field35",
"field36",
"field37",
"field38",
"field39",
"field40",
"field41",
"field42",
"field43",
"field44",
"field45",
"field46",
"field47",
"field48",
"field49",
"field50",
"field51",
"field52",
"field53",
"field54",
"field55",
"field56",
"field57",
"field58",
"field59",
"field60",
"field61",
"field62",
"field63",
"field64",
"field65",
"field66",
"field67",
"field68",
"field69",
"field70",
"field71",
"field72",
"field73",
"field74",
"field75",
"field76",
"field77",
"field78",
"field79",
"field80",
"field81",
"field82",
"field83",
"field84",
"field85",
"field86",
"field87",
"field88",
"field89",
"field90",
"field91",
"field92",
"field93",
"field94",
"field95",
"field96",
"field97",
"field98",
"field99",
"field100",
"field101",
"field102",
"field103",
"field104",
"field105",
"field106",
"field107",
"field108",
"field109",
"field110",
"field111",
"field112",
"field113",
"field114",
"field115",
"field116",
"field117",
"field118",
"field119",
"field120",
"field121",
"field122",
"field123",
"field124",
"field125",
"field126",
"field127",
"field128",
"field129",
"field130",
"field131",
"field132",
"field133",
"field134",
"field135",
"field136",
"field137",
"field138",
"field139",
"field140",
"field141",
"field142",
"field143",
"field144",
"field145",
"field146",
"field147",
"field148",
"field149",
"field150"
],
"connection":[
{
"jdbcUrl":"jdbc:mysql://192.168.122.28:3306/test?rewriteBatchedStatements=true&characterEncoding=utf8&useSSL=false",
"table":[
"t_dx_mysql150"
]
}
],
"password":"********",
"preSql":[
],
"session":[
],
"username":"root",
"writeMode":"insert",
"batchSize":2048
}
}
}
],
"setting":{
"speed":{
"channel":"2"
}
}
}
2024-04-08 14:46:54.289 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false
2024-04-08 14:46:54.290 [main] INFO JobContainer - DataX jobContainer starts job.
2024-04-08 14:46:54.291 [main] INFO JobContainer - Set jobId = 0
2024-04-08 14:46:54.852 [job-0] INFO OriginalConfPretreatmentUtil - Available jdbcUrl:jdbc:gbase://192.168.122.21:5258/test.
2024-04-08 14:46:55.099 [job-0] INFO OriginalConfPretreatmentUtil - table:[t_dx_mysql150] has columns:[id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150].
2024-04-08 14:46:55.417 [job-0] INFO OriginalConfPretreatmentUtil - table:[t_dx_mysql150] all columns:[
id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150
].
2024-04-08 14:46:55.431 [job-0] INFO OriginalConfPretreatmentUtil - Write data [
insert INTO %s (id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
], which jdbcUrl like:[jdbc:mysql://192.168.122.28:3306/test?rewriteBatchedStatements=true&characterEncoding=utf8&useSSL=false&yearIsDateType=false&zeroDateTimeBehavior=convertToNull&rewriteBatchedStatements=true&tinyInt1isBit=false]
2024-04-08 14:46:55.432 [job-0] INFO JobContainer - jobContainer starts to do prepare ...
2024-04-08 14:46:55.433 [job-0] INFO JobContainer - DataX Reader.Job [rdbmsreader] do prepare work .
2024-04-08 14:46:55.433 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] do prepare work .
2024-04-08 14:46:55.434 [job-0] INFO JobContainer - jobContainer starts to do split ...
2024-04-08 14:46:55.434 [job-0] INFO JobContainer - Job set Channel-Number to 2 channels.
2024-04-08 14:46:55.438 [job-0] INFO JobContainer - DataX Reader.Job [rdbmsreader] splits to [1] tasks.
2024-04-08 14:46:55.439 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] splits to [1] tasks.
2024-04-08 14:46:55.478 [job-0] INFO JobContainer - jobContainer starts to do schedule ...
2024-04-08 14:46:55.482 [job-0] INFO JobContainer - Scheduler starts [1] taskGroups.
2024-04-08 14:46:55.484 [job-0] INFO JobContainer - Running by standalone Mode.
2024-04-08 14:46:55.494 [taskGroup-0] INFO TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
2024-04-08 14:46:55.580 [taskGroup-0] INFO Channel - Channel set byte_speed_limit to -1, No bps activated.
2024-04-08 14:46:55.580 [taskGroup-0] INFO Channel - Channel set record_speed_limit to -1, No tps activated.
2024-04-08 14:46:55.593 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
2024-04-08 14:46:55.597 [0-0-0-reader] INFO CommonRdbmsReader$Task - Begin to read record by Sql: [select id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150 from t_dx_mysql150 where (id < 1000000)
] jdbcUrl:[jdbc:gbase://192.168.122.21:5258/test].
2024-04-08 14:47:06.928 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 14:47:18.750 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
……(中间同步进度日志省略)
2024-04-08 15:20:25.538 [job-0] INFO StandAloneJobContainerCommunicator - Total 963616 records, 953457895 bytes | Speed 489.71KB/s, 506 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 278.408s | All Task WaitReaderTime 1,687.931s | Percentage 0.00%
2024-04-08 15:20:26.100 [0-0-0-reader] INFO CommonRdbmsReader$Task - Finished read record by Sql: [select id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150 from t_dx_mysql150 where (id < 1000000)
] jdbcUrl:[jdbc:gbase://192.168.122.21:5258/test].
2024-04-08 15:20:26.539 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[2010949]ms
2024-04-08 15:20:26.540 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] completed it's tasks.
2024-04-08 15:20:35.539 [job-0] INFO StandAloneJobContainerCommunicator - Total 971347 records, 961111591 bytes | Speed 747.43KB/s, 773 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 280.557s | All Task WaitReaderTime 1,709.096s | Percentage 100.00%
2024-04-08 15:20:35.540 [job-0] INFO AbstractScheduler - Scheduler accomplished all tasks.
2024-04-08 15:20:35.541 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] do post work.
2024-04-08 15:20:35.542 [job-0] INFO JobContainer - DataX Reader.Job [rdbmsreader] do post work.
2024-04-08 15:20:35.542 [job-0] INFO JobContainer - DataX jobId [0] completed successfully.
2024-04-08 15:20:35.545 [job-0] INFO HookInvoker - No hook invoked, because base dir not exists or is a file: /opt/datax/hook
2024-04-08 15:20:35.545 [job-0] INFO JobContainer -
[total cpu info] =>
averageCpu | maxDeltaCpu | minDeltaCpu
-1.00% | -1.00% | -1.00%
[total gc info] =>
NAME | totalGCCount | maxDeltaGCCount | minDeltaGCCount | totalGCTime | maxDeltaGCTime | minDeltaGCTime
PS MarkSweep | 162 | 25 | 13 | 1,699.302s | 265.327s | 137.615s
PS Scavenge | 4 | 4 | 0 | 3.504s | 3.504s | 0.000s
2024-04-08 15:20:35.546 [job-0] INFO JobContainer - PerfTrace not enable!
2024-04-08 15:20:35.546 [job-0] INFO StandAloneJobContainerCommunicator - Total 971347 records, 961111591 bytes | Speed 464.65KB/s, 480 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 280.557s | All Task WaitReaderTime 1,709.096s | Percentage 100.00%
2024-04-08 15:20:35.548 [job-0] INFO JobContainer -
任务启动时刻 : 2024-04-08 14:46:54
任务结束时刻 : 2024-04-08 15:20:35
任务总计耗时 : 2021s
任务平均流量 : 464.65KB/s
记录写入速度 : 480rec/s
读出记录总数 : 971347
读写失败总数 : 0
(2) 同步150W以内行数据(报错情况)
主要报错:java.lang.OutOfMemoryError: GC overhead limit exceeded
[root@keep-hadoop bin]# python datax.py gb2mysql150.json
DataX (DATAX-OPENSOURCE-3.0), From Alibaba !
Copyright (C) 2010-2017, Alibaba Group. All Rights Reserved.
2024-04-08 16:05:01.332 [main] INFO MessageSource - JVM TimeZone: GMT+08:00, Locale: zh_CN
2024-04-08 16:05:01.335 [main] INFO MessageSource - use Locale: zh_CN timeZone: sun.util.calendar.ZoneInfo[id="GMT+08:00",offset=28800000,dstSavings=0,useDaylight=false,transitions=0,lastRule=null]
2024-04-08 16:05:01.351 [main] INFO VMInfo - VMInfo# operatingSystem class => sun.management.OperatingSystemImpl
2024-04-08 16:05:01.359 [main] INFO Engine - the machine info =>
osInfo: Linux amd64 3.10.0-1160.el7.x86_64
jvmInfo: Oracle Corporation 1.8 25.144-b01
cpu num: 4
totalPhysicalMemory: -0.00G
freePhysicalMemory: -0.00G
maxFileDescriptorCount: -1
currentOpenFileDescriptorCount: -1
GC Names [PS MarkSweep, PS Scavenge]
MEMORY_NAME | allocation_size | init_size
PS Eden Space | 1,536.50MB | 512.50MB
Code Cache | 240.00MB | 2.44MB
Compressed Class Space | 1,024.00MB | 0.00MB
PS Survivor Space | 85.00MB | 85.00MB
PS Old Gen | 3,413.50MB | 1,365.50MB
Metaspace | -0.00MB | 0.00MB
2024-04-08 16:05:01.395 [main] INFO Engine -
{
"content":[
{
"reader":{
"name":"rdbmsreader",
"parameter":{
"column":[
"id",
"field1",
"field2",
"field3",
"field4",
"field5",
"field6",
"field7",
"field8",
"field9",
"field10",
"field11",
"field12",
"field13",
"field14",
"field15",
"field16",
"field17",
"field18",
"field19",
"field20",
"field21",
"field22",
"field23",
"field24",
"field25",
"field26",
"field27",
"field28",
"field29",
"field30",
"field31",
"field32",
"field33",
"field34",
"field35",
"field36",
"field37",
"field38",
"field39",
"field40",
"field41",
"field42",
"field43",
"field44",
"field45",
"field46",
"field47",
"field48",
"field49",
"field50",
"field51",
"field52",
"field53",
"field54",
"field55",
"field56",
"field57",
"field58",
"field59",
"field60",
"field61",
"field62",
"field63",
"field64",
"field65",
"field66",
"field67",
"field68",
"field69",
"field70",
"field71",
"field72",
"field73",
"field74",
"field75",
"field76",
"field77",
"field78",
"field79",
"field80",
"field81",
"field82",
"field83",
"field84",
"field85",
"field86",
"field87",
"field88",
"field89",
"field90",
"field91",
"field92",
"field93",
"field94",
"field95",
"field96",
"field97",
"field98",
"field99",
"field100",
"field101",
"field102",
"field103",
"field104",
"field105",
"field106",
"field107",
"field108",
"field109",
"field110",
"field111",
"field112",
"field113",
"field114",
"field115",
"field116",
"field117",
"field118",
"field119",
"field120",
"field121",
"field122",
"field123",
"field124",
"field125",
"field126",
"field127",
"field128",
"field129",
"field130",
"field131",
"field132",
"field133",
"field134",
"field135",
"field136",
"field137",
"field138",
"field139",
"field140",
"field141",
"field142",
"field143",
"field144",
"field145",
"field146",
"field147",
"field148",
"field149",
"field150"
],
"connection":[
{
"jdbcUrl":[
"jdbc:gbase://192.168.122.21:5258/test?queryTimeoutKillsConnection=false&connectTimeout=10000000&socketTimeout=10000000"
],
"table":[
"t_dx_mysql150"
]
}
],
"password":"*************",
"username":"gbase",
"where":"id < 1500000"
}
},
"writer":{
"name":"mysqlwriter",
"parameter":{
"column":[
"id",
"field1",
"field2",
"field3",
"field4",
"field5",
"field6",
"field7",
"field8",
"field9",
"field10",
"field11",
"field12",
"field13",
"field14",
"field15",
"field16",
"field17",
"field18",
"field19",
"field20",
"field21",
"field22",
"field23",
"field24",
"field25",
"field26",
"field27",
"field28",
"field29",
"field30",
"field31",
"field32",
"field33",
"field34",
"field35",
"field36",
"field37",
"field38",
"field39",
"field40",
"field41",
"field42",
"field43",
"field44",
"field45",
"field46",
"field47",
"field48",
"field49",
"field50",
"field51",
"field52",
"field53",
"field54",
"field55",
"field56",
"field57",
"field58",
"field59",
"field60",
"field61",
"field62",
"field63",
"field64",
"field65",
"field66",
"field67",
"field68",
"field69",
"field70",
"field71",
"field72",
"field73",
"field74",
"field75",
"field76",
"field77",
"field78",
"field79",
"field80",
"field81",
"field82",
"field83",
"field84",
"field85",
"field86",
"field87",
"field88",
"field89",
"field90",
"field91",
"field92",
"field93",
"field94",
"field95",
"field96",
"field97",
"field98",
"field99",
"field100",
"field101",
"field102",
"field103",
"field104",
"field105",
"field106",
"field107",
"field108",
"field109",
"field110",
"field111",
"field112",
"field113",
"field114",
"field115",
"field116",
"field117",
"field118",
"field119",
"field120",
"field121",
"field122",
"field123",
"field124",
"field125",
"field126",
"field127",
"field128",
"field129",
"field130",
"field131",
"field132",
"field133",
"field134",
"field135",
"field136",
"field137",
"field138",
"field139",
"field140",
"field141",
"field142",
"field143",
"field144",
"field145",
"field146",
"field147",
"field148",
"field149",
"field150"
],
"connection":[
{
"jdbcUrl":"jdbc:mysql://192.168.122.28:3306/test?rewriteBatchedStatements=true&characterEncoding=utf8&useSSL=false",
"table":[
"t_dx_mysql150"
]
}
],
"password":"********",
"preSql":[
],
"session":[
],
"username":"root",
"writeMode":"insert",
"batchSize":2048
}
}
}
],
"setting":{
"speed":{
"channel":"2"
}
}
}
2024-04-08 16:05:01.422 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false
2024-04-08 16:05:01.422 [main] INFO JobContainer - DataX jobContainer starts job.
2024-04-08 16:05:01.424 [main] INFO JobContainer - Set jobId = 0
2024-04-08 16:05:02.021 [job-0] INFO OriginalConfPretreatmentUtil - Available jdbcUrl:jdbc:gbase://192.168.122.21:5258/test?queryTimeoutKillsConnection=false&connectTimeout=10000000&socketTimeout=10000000.
2024-04-08 16:05:02.266 [job-0] INFO OriginalConfPretreatmentUtil - table:[t_dx_mysql150] has columns:[id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150].
2024-04-08 16:05:02.588 [job-0] INFO OriginalConfPretreatmentUtil - table:[t_dx_mysql150] all columns:[
id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150
].
2024-04-08 16:05:02.605 [job-0] INFO OriginalConfPretreatmentUtil - Write data [
insert INTO %s (id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
], which jdbcUrl like:[jdbc:mysql://192.168.122.28:3306/test?rewriteBatchedStatements=true&characterEncoding=utf8&useSSL=false&yearIsDateType=false&zeroDateTimeBehavior=convertToNull&rewriteBatchedStatements=true&tinyInt1isBit=false]
2024-04-08 16:05:02.606 [job-0] INFO JobContainer - jobContainer starts to do prepare ...
2024-04-08 16:05:02.607 [job-0] INFO JobContainer - DataX Reader.Job [rdbmsreader] do prepare work .
2024-04-08 16:05:02.607 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] do prepare work .
2024-04-08 16:05:02.608 [job-0] INFO JobContainer - jobContainer starts to do split ...
2024-04-08 16:05:02.608 [job-0] INFO JobContainer - Job set Channel-Number to 2 channels.
2024-04-08 16:05:02.613 [job-0] INFO JobContainer - DataX Reader.Job [rdbmsreader] splits to [1] tasks.
2024-04-08 16:05:02.614 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] splits to [1] tasks.
2024-04-08 16:05:02.654 [job-0] INFO JobContainer - jobContainer starts to do schedule ...
2024-04-08 16:05:02.659 [job-0] INFO JobContainer - Scheduler starts [1] taskGroups.
2024-04-08 16:05:02.661 [job-0] INFO JobContainer - Running by standalone Mode.
2024-04-08 16:05:02.670 [taskGroup-0] INFO TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
2024-04-08 16:05:02.675 [taskGroup-0] INFO Channel - Channel set byte_speed_limit to -1, No bps activated.
2024-04-08 16:05:02.675 [taskGroup-0] INFO Channel - Channel set record_speed_limit to -1, No tps activated.
2024-04-08 16:05:02.754 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
2024-04-08 16:05:02.758 [0-0-0-reader] INFO CommonRdbmsReader$Task - Begin to read record by Sql: [select id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150 from t_dx_mysql150 where (id < 1500000)
] jdbcUrl:[jdbc:gbase://192.168.122.21:5258/test?queryTimeoutKillsConnection=false&connectTimeout=10000000&socketTimeout=10000000].
2024-04-08 16:05:12.957 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 16:05:23.398 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 16:05:35.974 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 16:05:46.930 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 16:06:26.418 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 16:07:06.146 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 16:08:52.454 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
java.lang.OutOfMemoryError: GC overhead limit exceeded
Dumping heap to /opt/datax/log/java_pid18528.hprof ...
Heap dump file created [4682159065 bytes in 21.500 secs]
2024-04-08 16:13:47.807 [job-0] ERROR JobContainer - Exception when job run
java.lang.OutOfMemoryError: GC overhead limit exceeded
at java.util.Arrays.copyOfRange(Arrays.java:3664) ~[na:1.8.0_144]
at java.lang.String.<init>(String.java:207) ~[na:1.8.0_144]
at java.lang.String.substring(String.java:1969) ~[na:1.8.0_144]
at java.lang.String.subSequence(String.java:2003) ~[na:1.8.0_144]
at java.util.regex.Matcher.getSubSequence(Matcher.java:1294) ~[na:1.8.0_144]
at java.util.regex.Matcher.group(Matcher.java:541) ~[na:1.8.0_144]
at java.util.Formatter$FormatSpecifier.<init>(Formatter.java:2720) ~[na:1.8.0_144]
at java.util.Formatter.parse(Formatter.java:2560) ~[na:1.8.0_144]
at java.util.Formatter.format(Formatter.java:2501) ~[na:1.8.0_144]
at java.util.Formatter.format(Formatter.java:2455) ~[na:1.8.0_144]
at java.lang.String.format(String.java:2940) ~[na:1.8.0_144]
at com.alibaba.datax.core.statistics.communication.CommunicationTool$Stringify.getTotal(CommunicationTool.java:156) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
at com.alibaba.datax.core.statistics.communication.CommunicationTool$Stringify.getSnapshot(CommunicationTool.java:119) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
at com.alibaba.datax.core.statistics.container.communicator.job.StandAloneJobContainerCommunicator.report(StandAloneJobContainerCommunicator.java:50) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
at com.alibaba.datax.core.job.scheduler.AbstractScheduler.schedule(AbstractScheduler.java:84) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
at com.alibaba.datax.core.job.JobContainer.schedule(JobContainer.java:535) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:119) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
at com.alibaba.datax.core.Engine.start(Engine.java:86) [datax-core-0.0.1-SNAPSHOT.jar:na]
at com.alibaba.datax.core.Engine.entry(Engine.java:168) [datax-core-0.0.1-SNAPSHOT.jar:na]
at com.alibaba.datax.core.Engine.main(Engine.java:201) [datax-core-0.0.1-SNAPSHOT.jar:na]
2024-04-08 16:13:47.852 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 16:13:47.857 [job-0] INFO VMInfo -
[delta cpu info] =>
curDeltaCpu | averageCpu | maxDeltaCpu | minDeltaCpu
-1.00% | -1.00% | -1.00% | -1.00%
[delta memory info] =>
NAME | used_size | used_percent | max_used_size | max_percent
PS Eden Space | 0.04MB | 0.01% | 0.04MB | 0.01%
Code Cache | 5.32MB | 99.02% | 5.32MB | 99.02%
Compressed Class Space | 2.24MB | 94.22% | 2.24MB | 94.22%
PS Survivor Space | 0.00MB | 0.00% | 0.00MB | 0.00%
PS Old Gen | 8.38MB | 0.32% | 8.38MB | 0.32%
Metaspace | 20.66MB | 98.17% | 20.66MB | 98.17%
[delta gc info] =>
NAME | curDeltaGCCount | totalGCCount | maxDeltaGCCount | minDeltaGCCount | curDeltaGCTime | totalGCTime | maxDeltaGCTime | minDeltaGCTime
PS MarkSweep | 53 | 53 | 53 | 53 | 488.014s | 488.014s | 488.014s | 488.014s
PS Scavenge | 7 | 7 | 7 | 7 | 4.717s | 4.717s | 4.717s | 4.717s
2024-04-08 16:13:47.861 [job-0] ERROR Engine -
经DataX智能分析,该任务最可能的错误原因是:
com.alibaba.datax.common.exception.DataXException: Code:[Framework-02], Description:[DataX引擎运行过程出错,具体原因请参看DataX运行结束时的错误诊断信息 .]. - java.lang.OutOfMemoryError: GC overhead limit exceeded
at java.util.Arrays.copyOfRange(Arrays.java:3664)
at java.lang.String.<init>(String.java:207)
at java.lang.String.substring(String.java:1969)
at java.lang.String.subSequence(String.java:2003)
at java.util.regex.Matcher.getSubSequence(Matcher.java:1294)
at java.util.regex.Matcher.group(Matcher.java:541)
at java.util.Formatter$FormatSpecifier.<init>(Formatter.java:2720)
at java.util.Formatter.parse(Formatter.java:2560)
at java.util.Formatter.format(Formatter.java:2501)
at java.util.Formatter.format(Formatter.java:2455)
at java.lang.String.format(String.java:2940)
at com.alibaba.datax.core.statistics.communication.CommunicationTool$Stringify.getTotal(CommunicationTool.java:156)
at com.alibaba.datax.core.statistics.communication.CommunicationTool$Stringify.getSnapshot(CommunicationTool.java:119)
at com.alibaba.datax.core.statistics.container.communicator.job.StandAloneJobContainerCommunicator.report(StandAloneJobContainerCommunicator.java:50)
at com.alibaba.datax.core.job.scheduler.AbstractScheduler.schedule(AbstractScheduler.java:84)
at com.alibaba.datax.core.job.JobContainer.schedule(JobContainer.java:535)
at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:119)
at com.alibaba.datax.core.Engine.start(Engine.java:86)
at com.alibaba.datax.core.Engine.entry(Engine.java:168)
at com.alibaba.datax.core.Engine.main(Engine.java:201)
- java.lang.OutOfMemoryError: GC overhead limit exceeded
at java.util.Arrays.copyOfRange(Arrays.java:3664)
at java.lang.String.<init>(String.java:207)
at java.lang.String.substring(String.java:1969)
at java.lang.String.subSequence(String.java:2003)
at java.util.regex.Matcher.getSubSequence(Matcher.java:1294)
at java.util.regex.Matcher.group(Matcher.java:541)
at java.util.Formatter$FormatSpecifier.<init>(Formatter.java:2720)
at java.util.Formatter.parse(Formatter.java:2560)
at java.util.Formatter.format(Formatter.java:2501)
at java.util.Formatter.format(Formatter.java:2455)
at java.lang.String.format(String.java:2940)
at com.alibaba.datax.core.statistics.communication.CommunicationTool$Stringify.getTotal(CommunicationTool.java:156)
at com.alibaba.datax.core.statistics.communication.CommunicationTool$Stringify.getSnapshot(CommunicationTool.java:119)
at com.alibaba.datax.core.statistics.container.communicator.job.StandAloneJobContainerCommunicator.report(StandAloneJobContainerCommunicator.java:50)
at com.alibaba.datax.core.job.scheduler.AbstractScheduler.schedule(AbstractScheduler.java:84)
at com.alibaba.datax.core.job.JobContainer.schedule(JobContainer.java:535)
at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:119)
at com.alibaba.datax.core.Engine.start(Engine.java:86)
at com.alibaba.datax.core.Engine.entry(Engine.java:168)
at com.alibaba.datax.core.Engine.main(Engine.java:201)
at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:48)
at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:162)
at com.alibaba.datax.core.Engine.start(Engine.java:86)
at com.alibaba.datax.core.Engine.entry(Engine.java:168)
at com.alibaba.datax.core.Engine.main(Engine.java:201)
Caused by: java.lang.OutOfMemoryError: GC overhead limit exceeded
at java.util.Arrays.copyOfRange(Arrays.java:3664)
at java.lang.String.<init>(String.java:207)
at java.lang.String.substring(String.java:1969)
at java.lang.String.subSequence(String.java:2003)
at java.util.regex.Matcher.getSubSequence(Matcher.java:1294)
at java.util.regex.Matcher.group(Matcher.java:541)
at java.util.Formatter$FormatSpecifier.<init>(Formatter.java:2720)
at java.util.Formatter.parse(Formatter.java:2560)
at java.util.Formatter.format(Formatter.java:2501)
at java.util.Formatter.format(Formatter.java:2455)
at java.lang.String.format(String.java:2940)
at com.alibaba.datax.core.statistics.communication.CommunicationTool$Stringify.getTotal(CommunicationTool.java:156)
at com.alibaba.datax.core.statistics.communication.CommunicationTool$Stringify.getSnapshot(CommunicationTool.java:119)
at com.alibaba.datax.core.statistics.container.communicator.job.StandAloneJobContainerCommunicator.report(StandAloneJobContainerCommunicator.java:50)
at com.alibaba.datax.core.job.scheduler.AbstractScheduler.schedule(AbstractScheduler.java:84)
at com.alibaba.datax.core.job.JobContainer.schedule(JobContainer.java:535)
at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:119)
... 3 more
(3) 同步150W以内行数据(跳过OutOfMemoryError报错)
对jvm参数进行调整:python datax.py --jvm="-Xms12G -Xmx15G" gb2mysql150.json
[root@keep-hadoop bin]# python datax.py --jvm="-Xms12G -Xmx15G" gb2mysql150.json
DataX (DATAX-OPENSOURCE-3.0), From Alibaba !
Copyright (C) 2010-2017, Alibaba Group. All Rights Reserved.
2024-04-08 17:14:43.143 [main] INFO MessageSource - JVM TimeZone: GMT+08:00, Locale: zh_CN
2024-04-08 17:14:43.146 [main] INFO MessageSource - use Locale: zh_CN timeZone: sun.util.calendar.ZoneInfo[id="GMT+08:00",offset=28800000,dstSavings=0,useDaylight=false,transitions=0,lastRule=null]
2024-04-08 17:14:43.166 [main] INFO VMInfo - VMInfo# operatingSystem class => sun.management.OperatingSystemImpl
2024-04-08 17:14:43.174 [main] INFO Engine - the machine info =>
osInfo: Linux amd64 3.10.0-1160.el7.x86_64
jvmInfo: Oracle Corporation 1.8 25.144-b01
cpu num: 4
totalPhysicalMemory: -0.00G
freePhysicalMemory: -0.00G
maxFileDescriptorCount: -1
currentOpenFileDescriptorCount: -1
GC Names [PS MarkSweep, PS Scavenge]
MEMORY_NAME | allocation_size | init_size
PS Eden Space | 4,096.00MB | 3,072.00MB
Code Cache | 240.00MB | 2.44MB
Compressed Class Space | 1,024.00MB | 0.00MB
PS Survivor Space | 512.00MB | 512.00MB
PS Old Gen | 10,240.00MB | 8,192.00MB
Metaspace | -0.00MB | 0.00MB
2024-04-08 17:14:43.206 [main] INFO Engine -
{
"content":[
{
"reader":{
"name":"rdbmsreader",
"parameter":{
"column":[
"id",
"field1",
"field2",
"field3",
"field4",
"field5",
"field6",
"field7",
"field8",
"field9",
"field10",
"field11",
"field12",
"field13",
"field14",
"field15",
"field16",
"field17",
"field18",
"field19",
"field20",
"field21",
"field22",
"field23",
"field24",
"field25",
"field26",
"field27",
"field28",
"field29",
"field30",
"field31",
"field32",
"field33",
"field34",
"field35",
"field36",
"field37",
"field38",
"field39",
"field40",
"field41",
"field42",
"field43",
"field44",
"field45",
"field46",
"field47",
"field48",
"field49",
"field50",
"field51",
"field52",
"field53",
"field54",
"field55",
"field56",
"field57",
"field58",
"field59",
"field60",
"field61",
"field62",
"field63",
"field64",
"field65",
"field66",
"field67",
"field68",
"field69",
"field70",
"field71",
"field72",
"field73",
"field74",
"field75",
"field76",
"field77",
"field78",
"field79",
"field80",
"field81",
"field82",
"field83",
"field84",
"field85",
"field86",
"field87",
"field88",
"field89",
"field90",
"field91",
"field92",
"field93",
"field94",
"field95",
"field96",
"field97",
"field98",
"field99",
"field100",
"field101",
"field102",
"field103",
"field104",
"field105",
"field106",
"field107",
"field108",
"field109",
"field110",
"field111",
"field112",
"field113",
"field114",
"field115",
"field116",
"field117",
"field118",
"field119",
"field120",
"field121",
"field122",
"field123",
"field124",
"field125",
"field126",
"field127",
"field128",
"field129",
"field130",
"field131",
"field132",
"field133",
"field134",
"field135",
"field136",
"field137",
"field138",
"field139",
"field140",
"field141",
"field142",
"field143",
"field144",
"field145",
"field146",
"field147",
"field148",
"field149",
"field150"
],
"connection":[
{
"jdbcUrl":[
"jdbc:gbase://192.168.122.21:5258/test?queryTimeoutKillsConnection=false&connectTimeout=10000000&socketTimeout=10000000"
],
"table":[
"t_dx_mysql150"
]
}
],
"password":"*************",
"username":"gbase",
"where":"id < 1500000"
}
},
"writer":{
"name":"mysqlwriter",
"parameter":{
"column":[
"id",
"field1",
"field2",
"field3",
"field4",
"field5",
"field6",
"field7",
"field8",
"field9",
"field10",
"field11",
"field12",
"field13",
"field14",
"field15",
"field16",
"field17",
"field18",
"field19",
"field20",
"field21",
"field22",
"field23",
"field24",
"field25",
"field26",
"field27",
"field28",
"field29",
"field30",
"field31",
"field32",
"field33",
"field34",
"field35",
"field36",
"field37",
"field38",
"field39",
"field40",
"field41",
"field42",
"field43",
"field44",
"field45",
"field46",
"field47",
"field48",
"field49",
"field50",
"field51",
"field52",
"field53",
"field54",
"field55",
"field56",
"field57",
"field58",
"field59",
"field60",
"field61",
"field62",
"field63",
"field64",
"field65",
"field66",
"field67",
"field68",
"field69",
"field70",
"field71",
"field72",
"field73",
"field74",
"field75",
"field76",
"field77",
"field78",
"field79",
"field80",
"field81",
"field82",
"field83",
"field84",
"field85",
"field86",
"field87",
"field88",
"field89",
"field90",
"field91",
"field92",
"field93",
"field94",
"field95",
"field96",
"field97",
"field98",
"field99",
"field100",
"field101",
"field102",
"field103",
"field104",
"field105",
"field106",
"field107",
"field108",
"field109",
"field110",
"field111",
"field112",
"field113",
"field114",
"field115",
"field116",
"field117",
"field118",
"field119",
"field120",
"field121",
"field122",
"field123",
"field124",
"field125",
"field126",
"field127",
"field128",
"field129",
"field130",
"field131",
"field132",
"field133",
"field134",
"field135",
"field136",
"field137",
"field138",
"field139",
"field140",
"field141",
"field142",
"field143",
"field144",
"field145",
"field146",
"field147",
"field148",
"field149",
"field150"
],
"connection":[
{
"jdbcUrl":"jdbc:mysql://192.168.122.28:3306/test?rewriteBatchedStatements=true&characterEncoding=utf8&useSSL=false",
"table":[
"t_dx_mysql150"
]
}
],
"password":"********",
"preSql":[
],
"session":[
],
"username":"root",
"writeMode":"insert",
"batchSize":2048
}
}
}
],
"setting":{
"speed":{
"channel":"2"
}
}
}
2024-04-08 17:14:43.234 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false
2024-04-08 17:14:43.235 [main] INFO JobContainer - DataX jobContainer starts job.
2024-04-08 17:14:43.236 [main] INFO JobContainer - Set jobId = 0
2024-04-08 17:14:43.816 [job-0] INFO OriginalConfPretreatmentUtil - Available jdbcUrl:jdbc:gbase://192.168.122.21:5258/test?queryTimeoutKillsConnection=false&connectTimeout=10000000&socketTimeout=10000000.
2024-04-08 17:14:43.921 [job-0] INFO OriginalConfPretreatmentUtil - table:[t_dx_mysql150] has columns:[id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150].
2024-04-08 17:14:44.249 [job-0] INFO OriginalConfPretreatmentUtil - table:[t_dx_mysql150] all columns:[
id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150
].
2024-04-08 17:14:44.265 [job-0] INFO OriginalConfPretreatmentUtil - Write data [
insert INTO %s (id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
], which jdbcUrl like:[jdbc:mysql://192.168.122.28:3306/test?rewriteBatchedStatements=true&characterEncoding=utf8&useSSL=false&yearIsDateType=false&zeroDateTimeBehavior=convertToNull&rewriteBatchedStatements=true&tinyInt1isBit=false]
2024-04-08 17:14:44.265 [job-0] INFO JobContainer - jobContainer starts to do prepare ...
2024-04-08 17:14:44.266 [job-0] INFO JobContainer - DataX Reader.Job [rdbmsreader] do prepare work .
2024-04-08 17:14:44.266 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] do prepare work .
2024-04-08 17:14:44.267 [job-0] INFO JobContainer - jobContainer starts to do split ...
2024-04-08 17:14:44.267 [job-0] INFO JobContainer - Job set Channel-Number to 2 channels.
2024-04-08 17:14:44.272 [job-0] INFO JobContainer - DataX Reader.Job [rdbmsreader] splits to [1] tasks.
2024-04-08 17:14:44.272 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] splits to [1] tasks.
2024-04-08 17:14:44.317 [job-0] INFO JobContainer - jobContainer starts to do schedule ...
2024-04-08 17:14:44.321 [job-0] INFO JobContainer - Scheduler starts [1] taskGroups.
2024-04-08 17:14:44.324 [job-0] INFO JobContainer - Running by standalone Mode.
2024-04-08 17:14:44.435 [taskGroup-0] INFO TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
2024-04-08 17:14:44.440 [taskGroup-0] INFO Channel - Channel set byte_speed_limit to -1, No bps activated.
2024-04-08 17:14:44.440 [taskGroup-0] INFO Channel - Channel set record_speed_limit to -1, No tps activated.
2024-04-08 17:14:44.457 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
2024-04-08 17:14:44.461 [0-0-0-reader] INFO CommonRdbmsReader$Task - Begin to read record by Sql: [select id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150 from t_dx_mysql150 where (id < 1500000)
] jdbcUrl:[jdbc:gbase://192.168.122.21:5258/test?queryTimeoutKillsConnection=false&connectTimeout=10000000&socketTimeout=10000000].
2024-04-08 17:14:57.954 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 17:15:07.956 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 17:15:45.734 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2024-04-08 17:15:55.737 [job-0] INFO StandAloneJobContainerCommunicator - Total 4608 records, 4555929 bytes | Speed 444.91KB/s, 460 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 1.631s | All Task WaitReaderTime 27.833s | Percentage 0.00%
...
2024-04-08 17:34:22.730 [job-0] INFO StandAloneJobContainerCommunicator - Total 1425920 records, 1411402172 bytes | Speed 1.78MB/s, 1880 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 762.613s | All Task WaitReaderTime 230.857s | Percentage 0.00%
2024-04-08 17:34:27.541 [0-0-0-reader] INFO CommonRdbmsReader$Task - Finished read record by Sql: [select id,field1,field2,field3,field4,field5,field6,field7,field8,field9,field10,field11,field12,field13,field14,field15,field16,field17,field18,field19,field20,field21,field22,field23,field24,field25,field26,field27,field28,field29,field30,field31,field32,field33,field34,field35,field36,field37,field38,field39,field40,field41,field42,field43,field44,field45,field46,field47,field48,field49,field50,field51,field52,field53,field54,field55,field56,field57,field58,field59,field60,field61,field62,field63,field64,field65,field66,field67,field68,field69,field70,field71,field72,field73,field74,field75,field76,field77,field78,field79,field80,field81,field82,field83,field84,field85,field86,field87,field88,field89,field90,field91,field92,field93,field94,field95,field96,field97,field98,field99,field100,field101,field102,field103,field104,field105,field106,field107,field108,field109,field110,field111,field112,field113,field114,field115,field116,field117,field118,field119,field120,field121,field122,field123,field124,field125,field126,field127,field128,field129,field130,field131,field132,field133,field134,field135,field136,field137,field138,field139,field140,field141,field142,field143,field144,field145,field146,field147,field148,field149,field150 from t_dx_mysql150 where (id < 1500000)
] jdbcUrl:[jdbc:gbase://192.168.122.21:5258/test?queryTimeoutKillsConnection=false&connectTimeout=10000000&socketTimeout=10000000].
2024-04-08 17:34:28.046 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[1183591]ms
2024-04-08 17:34:28.046 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] completed it's tasks.
2024-04-08 17:34:32.732 [job-0] INFO StandAloneJobContainerCommunicator - Total 1442677 records, 1428000001 bytes | Speed 1.58MB/s, 1675 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 784.432s | All Task WaitReaderTime 231.484s | Percentage 100.00%
2024-04-08 17:34:32.733 [job-0] INFO AbstractScheduler - Scheduler accomplished all tasks.
2024-04-08 17:34:32.734 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] do post work.
2024-04-08 17:34:32.735 [job-0] INFO JobContainer - DataX Reader.Job [rdbmsreader] do post work.
2024-04-08 17:34:32.736 [job-0] INFO JobContainer - DataX jobId [0] completed successfully.
2024-04-08 17:34:32.739 [job-0] INFO HookInvoker - No hook invoked, because base dir not exists or is a file: /opt/datax/hook
2024-04-08 17:34:32.740 [job-0] INFO JobContainer -
[total cpu info] =>
averageCpu | maxDeltaCpu | minDeltaCpu
-1.00% | -1.00% | -1.00%
[total gc info] =>
NAME | totalGCCount | maxDeltaGCCount | minDeltaGCCount | totalGCTime | maxDeltaGCTime | minDeltaGCTime
PS MarkSweep | 42 | 11 | 9 | 675.756s | 182.349s | 143.227s
PS Scavenge | 3 | 3 | 0 | 11.473s | 11.473s | 0.000s
2024-04-08 17:34:32.740 [job-0] INFO JobContainer - PerfTrace not enable!
2024-04-08 17:34:32.741 [job-0] INFO StandAloneJobContainerCommunicator - Total 1442677 records, 1428000001 bytes | Speed 1.15MB/s, 1214 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 784.432s | All Task WaitReaderTime 231.484s | Percentage 100.00%
2024-04-08 17:34:32.743 [job-0] INFO JobContainer -
任务启动时刻 : 2024-04-08 17:14:43
任务结束时刻 : 2024-04-08 17:34:32
任务总计耗时 : 1189s
任务平均流量 : 1.15MB/s
记录写入速度 : 1214rec/s
读出记录总数 : 1442677
读写失败总数 : 0