Using Python to imitate the mysqlbinlog command: binary analysis of the MySQL binlog

#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import struct, time, datetime, os
import sys, decimal, getopt, types

reload(sys)
sys.setdefaultencoding('utf-8')

class column_type_dict:
    MYSQL_TYPE_DECIMAL = 0
    MYSQL_TYPE_TINY = 1
    MYSQL_TYPE_SHORT = 2
    MYSQL_TYPE_LONG = 3
    MYSQL_TYPE_FLOAT = 4
    MYSQL_TYPE_DOUBLE = 5
    MYSQL_TYPE_NULL = 6
    MYSQL_TYPE_TIMESTAMP = 7
    MYSQL_TYPE_LONGLONG = 8
    MYSQL_TYPE_INT24 = 9
    MYSQL_TYPE_DATE = 10
    MYSQL_TYPE_TIME = 11
    MYSQL_TYPE_DATETIME = 12
    MYSQL_TYPE_YEAR = 13
    MYSQL_TYPE_NEWDATE = 14
    MYSQL_TYPE_VARCHAR = 15
    MYSQL_TYPE_BIT = 16
    MYSQL_TYPE_TIMESTAMP2 = 17
    MYSQL_TYPE_DATETIME2 = 18
    MYSQL_TYPE_TIME2 = 19
    MYSQL_TYPE_JSON = 245
    MYSQL_TYPE_NEWDECIMAL = 246
    MYSQL_TYPE_ENUM = 247
    MYSQL_TYPE_SET = 248
    MYSQL_TYPE_TINY_BLOB = 249
    MYSQL_TYPE_MEDIUM_BLOB = 250
    MYSQL_TYPE_LONG_BLOB = 251
    MYSQL_TYPE_BLOB = 252
    MYSQL_TYPE_VAR_STRING = 253
    MYSQL_TYPE_STRING = 254
    MYSQL_TYPE_GEOMETRY = 255


class binlog_events:
    UNKNOWN_EVENT = 0
    START_EVENT_V3 = 1
    QUERY_EVENT = 2
    STOP_EVENT = 3
    ROTATE_EVENT = 4
    INTVAR_EVENT = 5
    LOAD_EVENT = 6
    SLAVE_EVENT = 7
    CREATE_FILE_EVENT = 8
    APPEND_BLOCK_EVENT = 9
    EXEC_LOAD_EVENT = 10
    DELETE_FILE_EVENT = 11
    NEW_LOAD_EVENT = 12
    RAND_EVENT = 13
    USER_VAR_EVENT = 14
    FORMAT_DESCRIPTION_EVENT = 15
    XID_EVENT = 16
    BEGIN_LOAD_QUERY_EVENT = 17
    EXECUTE_LOAD_QUERY_EVENT = 18
    TABLE_MAP_EVENT = 19
    PRE_GA_WRITE_ROWS_EVENT = 20
    PRE_GA_UPDATE_ROWS_EVENT = 21
    PRE_GA_DELETE_ROWS_EVENT = 22
    # 23-25 are the pre-5.6 (v1) row events; the script only handles the v2 codes 30-32 below
    WRITE_ROWS_EVENT_V1 = 23
    UPDATE_ROWS_EVENT_V1 = 24
    DELETE_ROWS_EVENT_V1 = 25
    INCIDENT_EVENT = 26
    HEARTBEAT_LOG_EVENT = 27
    IGNORABLE_LOG_EVENT = 28
    ROWS_QUERY_LOG_EVENT = 29
    WRITE_ROWS_EVENT = 30
    UPDATE_ROWS_EVENT = 31
    DELETE_ROWS_EVENT = 32
    GTID_LOG_EVENT = 33
    ANONYMOUS_GTID_LOG_EVENT = 34
    PREVIOUS_GTIDS_LOG_EVENT = 35


class json_type:
    NULL_COLUMN = 251
    UNSIGNED_CHAR_COLUMN = 251
    UNSIGNED_SHORT_COLUMN = 252
    UNSIGNED_INT24_COLUMN = 253
    UNSIGNED_INT64_COLUMN = 254
    UNSIGNED_CHAR_LENGTH = 1
    UNSIGNED_SHORT_LENGTH = 2
    UNSIGNED_INT24_LENGTH = 3
    UNSIGNED_INT64_LENGTH = 8

    JSONB_TYPE_SMALL_OBJECT = 0x0
    JSONB_TYPE_LARGE_OBJECT = 0x1
    JSONB_TYPE_SMALL_ARRAY = 0x2
    JSONB_TYPE_LARGE_ARRAY = 0x3
    JSONB_TYPE_LITERAL = 0x4
    JSONB_TYPE_INT16 = 0x5
    JSONB_TYPE_UINT16 = 0x6
    JSONB_TYPE_INT32 = 0x7
    JSONB_TYPE_UINT32 = 0x8
    JSONB_TYPE_INT64 = 0x9
    JSONB_TYPE_UINT64 = 0xA
    JSONB_TYPE_DOUBLE = 0xB
    JSONB_TYPE_STRING = 0xC
    JSONB_TYPE_OPAQUE = 0xF

    JSONB_LITERAL_NULL = 0x0
    JSONB_LITERAL_TRUE = 0x1
    JSONB_LITERAL_FALSE = 0x2


BINLOG_FILE_HEADER = b'\xFE\x62\x69\x6E'
binlog_event_header_len = 19
binlog_event_fix_part = 13
binlog_quer_event_stern = 4
binlog_row_event_extra_headers = 2
read_format_desc_event_length = 56
binlog_xid_event_length = 8
table_map_event_fix_length = 8
fix_length = 8


class _rollback:
    rollback_status = None

    database = None
    table = None
    _gtid = None

    _myfile = None
    _myfunc = None


class _remote_filed:
    _gtid = None
    _gtid_status = None

    _thread_id = None
    _tid_status = None
    _tid_gid = None
    _tid_gid_pos = None
    _tid_gid_time = None

    _rollback_status = None


class Echo(object):
    '''
    print binlog
    '''

    def Version(self, binlog_ver, server_ver, create_time):
        print 'binlog_ver : {} server_ver : {} create_time : {}'.format(binlog_ver, server_ver, create_time)

    def TractionHeader(self, thread_id, database_name, sql_statement, timestamp, _pos):

        if _remote_filed._thread_id:
            if _remote_filed._thread_id == thread_id:
                self.Gtid(timestamp, _remote_filed._tid_gid, _remote_filed._tid_gid_pos)
                print '{} GTID_NEXT : {} at pos : {}'.format(_remote_filed._tid_gid_time, _remote_filed._tid_gid, _remote_filed._tid_gid_pos)
                print '{} thread id : {} at pos : {} database : {} statement : {}'.format(timestamp, thread_id, _pos, database_name, sql_statement)
                _remote_filed._tid_status = True
                if _remote_filed._rollback_status:
                    _rollback.database = database_name
        elif _remote_filed._gtid:
            if _remote_filed._gtid_status:
                print '{} thread id : {} at pos : {} database : {} statement : {}'.format(timestamp, thread_id, _pos, database_name, sql_statement)
                if _remote_filed._rollback_status:
                    _rollback.database = database_name
        elif _rollback.rollback_status:
            _rollback.database = database_name
        else:
            print '{} thread id : {} at pos : {} database : {} statement : {}'.format(timestamp, thread_id, _pos, database_name, sql_statement)

    def Xid(self, timestamp, xid_num, _pos):
        if _remote_filed._thread_id:
            if _remote_filed._tid_status:
                _remote_filed._tid_status = None
                print '{} statement : COMMIT xid : {} at pos : {}'.format(timestamp, xid_num, _pos)
                print ''
        elif _remote_filed._gtid:
            if _remote_filed._gtid_status:
                print '{} statement : COMMIT xid : {} at pos : {}'.format(timestamp, xid_num, _pos)
                print ''
                # the requested GTID transaction has been printed completely: raise so that
                # ReadPack() catches it and closes the replication connection
                raise Exception('target gtid transaction finished')
        elif _rollback.rollback_status:
            _rollback._myfunc.SaveGtid(xid=True)
        else:
            print '{} statement : COMMIT xid : {} at pos : {}'.format(timestamp, xid_num, _pos)
            print ''

    def Tablemap(self, timestamp, tablename):
        if _remote_filed._thread_id:
            if _remote_filed._tid_status:
                print '{} tablename : {}'.format(timestamp, tablename)
                if _remote_filed._rollback_status:
                    _rollback.table = tablename
        elif _remote_filed._gtid:
            if _remote_filed._gtid_status:
                print '{} tablename : {}'.format(timestamp, tablename)
                if _remote_filed._rollback_status:
                    _rollback.table = tablename
        elif _rollback.rollback_status:
            _rollback.table = tablename
        else:
            print '{} tablename : {}'.format(timestamp, tablename)

    def Gtid(self, timestamp, gtid, _pos):
        if _remote_filed._thread_id:
            _remote_filed._tid_gid, _remote_filed._tid_gid_pos, _remote_filed._tid_gid_time = gtid, _pos, timestamp
        elif _remote_filed._gtid:
            if _remote_filed._gtid == gtid:
                print '{} GTID_NEXT : {} at pos : {}'.format(timestamp, gtid, _pos)
                _remote_filed._gtid_status = True
        elif _rollback.rollback_status:
            _rollback._myfunc.SaveGtid(gtid=gtid)
        else:
            print '{} GTID_NEXT : {} at pos : {}'.format(timestamp, gtid, _pos)

    def TractionVlues(self, before_value=None, after_value=None, type=None):
        if _remote_filed._thread_id:
            if _remote_filed._tid_status:
                if _remote_filed._rollback_status:
                    _rollback._myfunc.CreateSQL(before_value=before_value, after_value=after_value, event_type=type)
                else:
                    self._tv(before_value=before_value, after_value=after_value, type=type)
        elif _remote_filed._gtid:
            if _remote_filed._gtid_status:
                if _remote_filed._rollback_status:
                    _rollback._myfunc.CreateSQL(before_value=before_value, after_value=after_value, event_type=type)
                else:
                    self._tv(before_value=before_value, after_value=after_value, type=type)
        elif _rollback.rollback_status:
            _rollback._myfunc.CreateSQL(before_value=before_value, after_value=after_value, event_type=type)
        else:
            self._tv(before_value=before_value, after_value=after_value, type=type)

    def _tv(self, before_value=None, after_value=None, type=None):
        if type == binlog_events.UPDATE_ROWS_EVENT:
            print '{: >21}{} before_value : [{}] after_value : [{}]'.format('', 'UPDATE_ROWS_EVENT',
                                                                            ','.join(['{}'.format(a) for a in before_value]),
                                                                            ','.join(['{}'.format(a) for a in after_value]))
        else:
            if type == binlog_events.DELETE_ROWS_EVENT:
                print '{: >21}{} value : [{}]'.format('', 'DELETE_ROW_EVENT',
                                                      ','.join(['{}'.format(a) for a in after_value]))
            elif type == binlog_events.WRITE_ROWS_EVENT:
                print '{: >21}{} value : [{}]'.format('', 'WRITE_ROW_EVENT',
                                                      ','.join(['{}'.format(a) for a in after_value]))


class PrintSql(object):
    def __seek(self, num):
        try:
            _rollback._myfile.seek(-num, 1)
        except:
            _rollback._myfile.close()
            self.rmfile()
            sys.exit()

    def read(self):
        _num = 9
        _rollback._myfile.seek(0, 2)
        while True:
            self.__seek(_num)
            _value, _type_code, = struct.unpack('QB', _rollback._myfile.read(_num))
            self.__seek(_num)
            if _type_code == 1:
                self.__gtid(_value)
            elif _type_code == 2:
                self.__statement(_value)
            else:
                print 'Error: type_code {}'.format(_type_code)

    def __gtid(self, tid):
        self.__seek(36)
        _uuid = _rollback._myfile.read(36)
        gtid = _uuid.decode('utf8') + ':' + str(tid)
        print ''
        print '#{: >10} GTID_NEXT : {}'.format('-', gtid)
        self.__seek(36)

    def __statement(self, length):
        self.__seek(length)
        _sql = _rollback._myfile.read(length)
        sql, = struct.unpack('{}s'.format(length), _sql)
        print '{: >10}{}'.format('', sql)
        self.__seek(length)

    def rmfile(self):
        os.remove('tmp_rollback')


class GetRollStatement(object):
    def __init__(self, host, user, passwd, port=None):
        import pymysql, traceback
        self.port = port if port != None else 3306
        try:
            self.local_conn = pymysql.connect(host=host, user=user, passwd=passwd, port=self.port, db='',
                                              charset="utf8")
        except pymysql.Error, e:
            print traceback.format_exc()
        self.column_list = []
        self._is_pri = []

    def __join(self, column, value):
        if type(value) is types.StringType:
            if value == 'Null':
                return '{}={}'.format(column, value)
            else:
                return '{}="{}"'.format(column, value)
        else:
            return '{}={}'.format(column, value)

    def WriteEvent(self, values):
        sql = 'delete from {}.{} where '.format(_rollback.database, _rollback.table)
        if self._is_pri:
            sql += self.__join(self._is_pri[0][0], values[self._is_pri[0][1]])
        else:
            for i, column in enumerate(self.column_list):
                sql += self.__join(column[0], values[i])
                if column != self.column_list[-1]:
                    sql += ' and '
        if _remote_filed._rollback_status:
            print '{: >21}{}{}'.format('', '--', sql)
        else:
            self.__tmppack(sql, 2)

    def DeleteEvent(self, values):
        sql = 'insert into {}.{}({}) values('.format(_rollback.database, _rollback.table,
                                                     ','.join([a[0] for a in self.column_list]))
        for idex, value in enumerate(values):
            if type(value) is types.StringType:
                if value == 'Null':
                    sql += '{}'.format(value)
                else:
                    sql += '"{}"'.format(value)
            else:
                sql += '{}'.format(value)
            if len(values[idex:]) <= 1:
                sql += ')'
            else:
                sql += ','
        if _remote_filed._rollback_status:
            print '{: >21}{}{}'.format('', '--', sql)
        else:
            self.__tmppack(sql, 2)

    def UpateEvent(self, after_values, befor_values):
        _set = []
        _where = []
        if self._is_pri:
            _where.append(self.__join(self._is_pri[0][0], after_values[self._is_pri[0][1]]))
        else:
            for i, column in enumerate(self.column_list):
                _where.append(self.__join(column[0], after_values[i]))

        for i, column in enumerate(self.column_list):
            _set.append(self.__join(column[0], befor_values[i]))
        # join the where conditions with AND so multi-column conditions stay valid SQL
        sql = 'update {}.{} set {} where {}'.format(_rollback.database, _rollback.table,
                                                    ','.join(_set).replace(" ", ""), ' and '.join(_where))
        if _remote_filed._rollback_status:
            print '{: >21}{}{}'.format('', '--', sql)
        else:
            self.__tmppack(sql, 2)

    def GetColumnName(self):
        with self.local_conn.cursor() as cur:
            sql = 'desc {}.{};'.format(_rollback.database, _rollback.table)
            cur.execute(sql)
            result = cur.fetchall()
        self.column_list = [[a[0], a[3]] for a in result]

    def CreateSQL(self, before_value=None, after_value=None, event_type=None):
        self.GetColumnName()
        self._is_pri = [[_a[0], idex] for idex, _a in enumerate(self.column_list) if 'PRI' in _a]
        if event_type == binlog_events.WRITE_ROWS_EVENT:
            self.WriteEvent(after_value)
        elif event_type == binlog_events.UPDATE_ROWS_EVENT:
            self.UpateEvent(after_value, before_value)
        elif event_type == binlog_events.DELETE_ROWS_EVENT:
            self.DeleteEvent(after_value)

    def SaveGtid(self, gtid=None, xid=None):
        if xid:
            __gtid = _rollback._gtid.split(':')
            tid = int(__gtid[1])
            uuid = str(__gtid[0])
            self.__tmppackgtid(uuid, tid, 1)
        elif _rollback._gtid != gtid:
            _rollback._gtid = gtid

    def __tmppackgtid(self, uuid, tid, type):
        s_uuid = struct.Struct('{}s'.format(len(uuid)))
        s_header = struct.Struct('QB')
        _uuid = s_uuid.pack(uuid)
        _header = s_header.pack(tid, type)
        _rollback._myfile.write(_uuid)
        _rollback._myfile.write(_header)

    def __tmppack(self, value, type):
        import re
        _value = re.sub(r"\s{2,}", " ", str(value).strip()) + ';'
        s_value = struct.Struct('{}s'.format(len(_value)))
        s_header = struct.Struct('QB')
        _value = s_value.pack(_value)
        _header = s_header.pack(len(_value), type)
        _rollback._myfile.write(_value)
        _rollback._myfile.write(_header)

    def _close(self):
        self.local_conn.close()

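# Illustrative example (not from the original post): for a hypothetical table
# test.t1(id INT PRIMARY KEY, name VARCHAR(10)), CreateSQL() above would turn a
# WRITE_ROWS event that inserted (1, 'a') into the reverse statement
#   delete from test.t1 where id=1
# and a DELETE_ROWS event of the same row into
#   insert into test.t1(id,name) values(1,"a")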

class Read(Echo):
    def __init__(self, start_position=None, filename=None, pack=None):
        self.__packet = pack
        if filename:
            self.file_data = open(filename, 'rb')
            read_byte = self.read_bytes(4)
            if read_byte != BINLOG_FILE_HEADER:
                print 'error : Is not a standard binlog file format'
                exit()
            if start_position:
                self.file_data.seek(start_position - 4, 1)

    def read_int_be_by_size(self, size, bytes=None):
        '''Read a big endian integer value based on the byte count'''
        if bytes is None:
            if size == 1:
                return struct.unpack('>b', self.read_bytes(size))[0]
            elif size == 2:
                return struct.unpack('>h', self.read_bytes(size))[0]
            elif size == 3:
                return self.read_int24_be()
            elif size == 4:
                return struct.unpack('>i', self.read_bytes(size))[0]
            elif size == 5:
                return self.read_int40_be()
            elif size == 8:
                return struct.unpack('>q', self.read_bytes(size))[0]   # 8 bytes needs 'q'
        else:
            '''used for read new decimal'''
            if size == 1:
                return struct.unpack('>b', bytes[0:size])[0]
            elif size == 2:
                return struct.unpack('>h', bytes[0:size])[0]
            elif size == 3:
                return self.read_int24_be(bytes)
            elif size == 4:
                return struct.unpack('>i', bytes[0:size])[0]

    def read_int24_be(self, bytes=None):
        if bytes is None:
            a, b, c = struct.unpack('BBB', self.read_bytes(3))
        else:
            a, b, c = struct.unpack('BBB', bytes[0:3])
        res = (a << 16) | (b << 8) | c
        if res >= 0x800000:
            res -= 0x1000000
        return res

    def read_uint_by_size(self, size):
        '''Read a little endian integer value based on the byte count'''
        if size == 1:
            return self.read_uint8()
        elif size == 2:
            return self.read_uint16()
        elif size == 3:
            return self.read_uint24()
        elif size == 4:
            return self.read_uint32()
        elif size == 5:
            return self.read_uint40()
        elif size == 6:
            return self.read_uint48()
        elif size == 7:
            return self.read_uint56()
        elif size == 8:
            return self.read_uint64()

    def read_uint24(self):
        a, b, c = struct.unpack("<BBB", self.read_bytes(3))
        return a + (b << 8) + (c << 16)

    def read_int24(self):
        a, b, c = struct.unpack("<BBB", self.read_bytes(3))
        res = a | (b << 8) | (c << 16)
        if res >= 0x800000:
            res -= 0x1000000
        return res

    def read_uint40(self):
        a, b = struct.unpack("<BI", self.read_bytes(5))
        return a + (b << 8)

    def read_int40_be(self):
        a, b = struct.unpack(">IB", self.read_bytes(5))
        return b + (a << 8)

    def read_uint48(self):
        a, b, c = struct.unpack("<HHH", self.read_bytes(6))
        return a + (b << 16) + (c << 32)

    def read_uint56(self):
        a, b, c = struct.unpack("<BHI", self.read_bytes(7))
        return a + (b << 8) + (c << 24)

    def read_bytes(self, count):
        try:
            return self.file_data.read(count) if self.__packet is None else self.__packet.read(count)
        except:
            return None

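    # Illustrative note (not in the original post): the little endian helpers above build
    # multi-byte integers out of single bytes; read_uint24, for example, decodes the bytes
    # '\x01\x02\x03' as 0x01 + (0x02 << 8) + (0x03 << 16) = 197121.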

    def read_uint64(self):
        read_byte = self.read_bytes(8)
        result, = struct.unpack('Q', read_byte)
        return result

    def read_int64(self):
        read_byte = self.read_bytes(8)
        result, = struct.unpack('q', read_byte)
        return result

    def read_uint32(self):
        read_byte = self.read_bytes(4)
        result, = struct.unpack('I', read_byte)
        return result

    def read_int32(self):
        read_byte = self.read_bytes(4)
        result, = struct.unpack('i', read_byte)
        return result

    def read_uint16(self):
        read_byte = self.read_bytes(2)
        result, = struct.unpack('H', read_byte)
        return result

    def read_int16(self):
        read_byte = self.read_bytes(2)
        result, = struct.unpack('h', read_byte)
        return result

    def read_uint8(self):
        read_byte = self.read_bytes(1)
        result, = struct.unpack('B', read_byte)
        return result

    def read_int8(self):
        read_byte = self.read_bytes(1)
        result, = struct.unpack('b', read_byte)
        return result

    def read_format_desc_event(self):
        binlog_ver, = struct.unpack('H', self.read_bytes(2))
        server_ver, = struct.unpack('50s', self.read_bytes(50))
        create_time, = struct.unpack('I', self.read_bytes(4))
        return binlog_ver, server_ver, create_time


    def __add_fsp_to_time(self, time, column):
        """Read and add the fractional part of time
        For more details about new date format:
        """
        microsecond, read = self.__read_fsp(column)
        if microsecond > 0:
            time = time.replace(microsecond=microsecond)
        return time, read

    def __read_fsp(self, column):
        read = 0
        if column == 1 or column == 2:
            read = 1
        elif column == 3 or column == 4:
            read = 2
        elif column == 5 or column == 6:
            read = 3
        if read > 0:
            microsecond = self.read_int_be_by_size(read)
            if column % 2:
                return int(microsecond / 10), read
            else:
                return microsecond, read

        return 0, 0

    def __read_binary_slice(self, binary, start, size, data_length):
        """
        Read a part of binary data and extract a number
        binary: the data
        start: From which bit (1 to X)
        size: How many bits should be read
        data_length: data size
        """
        binary = binary >> data_length - (start + size)
        mask = ((1 << size) - 1)
        return binary & mask

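    # Illustrative note (not in the original post): __read_binary_slice(data, 1, 17, 40)
    # computes (data >> (40 - (1 + 17))) & ((1 << 17) - 1), i.e. it keeps the 17 bits that
    # follow the sign bit of a 40-bit value; that is exactly how the year*13+month field
    # is pulled out of a DATETIME2 value below.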

    def __read_datetime2(self, column):
        """DATETIME2

        1 bit   sign           (1= non-negative, 0= negative)
        17 bits year*13+month  (year 0-9999, month 0-12)
        5 bits  day            (0-31)
        5 bits  hour           (0-23)
        6 bits  minute         (0-59)
        6 bits  second         (0-59)
        ---------------------------
        40 bits = 5 bytes
        """
        data = self.read_int_be_by_size(5)
        year_month = self.__read_binary_slice(data, 1, 17, 40)
        try:
            t = datetime.datetime(
                year=int(year_month / 13),
                month=year_month % 13,
                day=self.__read_binary_slice(data, 18, 5, 40),
                hour=self.__read_binary_slice(data, 23, 5, 40),
                minute=self.__read_binary_slice(data, 28, 6, 40),
                second=self.__read_binary_slice(data, 34, 6, 40))
        except ValueError:
            return None
        __time, read = self.__add_fsp_to_time(t, column)
        return __time, read

    def __read_time2(self, column):
        """TIME encoding for the nonfractional part:

        1 bit   sign    (1= non-negative, 0= negative)
        1 bit   unused  (reserved for future extensions)
        10 bits hour    (0-838)
        6 bits  minute  (0-59)
        6 bits  second  (0-59)
        ---------------------
        24 bits = 3 bytes
        """
        data = self.read_int_be_by_size(3)

        sign = 1 if self.__read_binary_slice(data, 0, 1, 24) else -1
        if sign == -1:
            '''
            negative integers are stored as 2's complement,
            hence take 2's complement again to get the right value.
            '''
            data = ~data + 1

        microseconds, read = self.__read_fsp(column)
        t = datetime.timedelta(
            hours=sign * self.__read_binary_slice(data, 2, 10, 24),
            minutes=self.__read_binary_slice(data, 12, 6, 24),
            seconds=self.__read_binary_slice(data, 18, 6, 24),
            microseconds=microseconds
        )
        return t, read + 3

    def __read_date(self):
        time = self.read_uint24()
        if time == 0:  # nasty mysql 0000-00-00 dates
            return None

        year = (time & ((1 << 15) - 1) << 9) >> 9
        month = (time & ((1 << 4) - 1) << 5) >> 5
        day = (time & ((1 << 5) - 1))
        if year == 0 or month == 0 or day == 0:
            return None

        date = datetime.date(
            year=year,
            month=month,
            day=day
        )
        return date


    def __read_new_decimal(self, precision, decimals):
        """Read MySQL's new decimal format introduced in MySQL 5"""
        '''
        Each multiple of nine digits requires four bytes, and the "leftover" digits
        require some fraction of four bytes, given by the following table:

        Leftover Digits   Number of Bytes
        0                 0
        1                 1
        2                 1
        3                 2
        4                 2
        5                 3
        6                 3
        7                 4
        8                 4
        '''
        digits_per_integer = 9
        compressed_bytes = [0, 1, 1, 2, 2, 3, 3, 4, 4, 4]
        integral = (precision - decimals)
        uncomp_integral = int(integral / digits_per_integer)
        uncomp_fractional = int(decimals / digits_per_integer)
        comp_integral = integral - (uncomp_integral * digits_per_integer)
        comp_fractional = decimals - (uncomp_fractional * digits_per_integer)

        _read_bytes = (uncomp_integral * 4) + (uncomp_fractional * 4) + compressed_bytes[comp_fractional] + compressed_bytes[comp_integral]

        _data = bytearray(self.read_bytes(_read_bytes))
        value = _data[0]
        if value & 0x80 != 0:
            res = ""
            mask = 0
        else:
            mask = -1
            res = "-"
        _data[0] = value ^ 0x80   # clear the sign bit before decoding the digit groups

        size = compressed_bytes[comp_integral]
        offset = 0
        if size > 0:
            offset += size
            value = self.read_int_be_by_size(size=size, bytes=_data) ^ mask
            res += str(value)

        for i in range(0, uncomp_integral):
            offset += 4
            value = struct.unpack('>i', _data[offset - 4:offset])[0] ^ mask
            res += '%09d' % value

        res += "."

        for i in range(0, uncomp_fractional):
            offset += 4
            value = struct.unpack('>i', _data[offset - 4:offset])[0] ^ mask
            res += '%09d' % value

        size = compressed_bytes[comp_fractional]
        if size > 0:
            value = self.read_int_be_by_size(size=size, bytes=_data[offset:]) ^ mask
            res += '%0*d' % (comp_fractional, value)

        return decimal.Decimal(res), _read_bytes

    def __is_null(self, null_bitmap, position):
        bit = null_bitmap[int(position / 8)]
        if type(bit) is str:
            bit = ord(bit)
        return bit & (1 << (position % 8))

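    # Illustrative worked example (not in the original post): for a DECIMAL(10,2) column,
    # integral = 8 digits -> uncomp_integral = 0, comp_integral = 8 (4 bytes), and
    # decimals = 2 -> uncomp_fractional = 0, comp_fractional = 2 (1 byte), so
    # __read_new_decimal() consumes 4 + 1 = 5 bytes per value.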

    '''parsing for json'''
    '''################################################################'''

    def read_binary_json(self, length):
        t = self.read_uint8()
        return self.read_binary_json_type(t, length)

    def read_binary_json_type(self, t, length):
        large = (t in (json_type.JSONB_TYPE_LARGE_OBJECT, json_type.JSONB_TYPE_LARGE_ARRAY))
        if t in (json_type.JSONB_TYPE_SMALL_OBJECT, json_type.JSONB_TYPE_LARGE_OBJECT):
            return self.read_binary_json_object(length - 1, large)
        elif t in (json_type.JSONB_TYPE_SMALL_ARRAY, json_type.JSONB_TYPE_LARGE_ARRAY):
            return self.read_binary_json_array(length - 1, large)
        elif t in (json_type.JSONB_TYPE_STRING,):
            return self.read_length_coded_pascal_string(1)
        elif t in (json_type.JSONB_TYPE_LITERAL,):
            value = self.read_uint8()
            if value == json_type.JSONB_LITERAL_NULL:
                return None
            elif value == json_type.JSONB_LITERAL_TRUE:
                return True
            elif value == json_type.JSONB_LITERAL_FALSE:
                return False
        elif t == json_type.JSONB_TYPE_INT16:
            return self.read_int16()
        elif t == json_type.JSONB_TYPE_UINT16:
            return self.read_uint16()
        elif t in (json_type.JSONB_TYPE_DOUBLE,):
            return struct.unpack('<d', self.read_bytes(8))[0]
        elif t == json_type.JSONB_TYPE_INT32:
            return self.read_int32()
        elif t == json_type.JSONB_TYPE_UINT32:
            return self.read_uint32()
        elif t == json_type.JSONB_TYPE_INT64:
            return self.read_int64()
        elif t == json_type.JSONB_TYPE_UINT64:
            return self.read_uint64()

        raise ValueError('Json type %d is not handled' % t)

    def read_binary_json_type_inlined(self, t):
        if t == json_type.JSONB_TYPE_LITERAL:
            value = self.read_uint16()
            if value == json_type.JSONB_LITERAL_NULL:
                return None
            elif value == json_type.JSONB_LITERAL_TRUE:
                return True
            elif value == json_type.JSONB_LITERAL_FALSE:
                return False
        elif t == json_type.JSONB_TYPE_INT16:
            return self.read_int16()
        elif t == json_type.JSONB_TYPE_UINT16:
            return self.read_uint16()
        elif t == json_type.JSONB_TYPE_INT32:
            return self.read_int32()
        elif t == json_type.JSONB_TYPE_UINT32:
            return self.read_uint32()

        raise ValueError('Json type %d is not handled' % t)


    def read_binary_json_object(self, length, large):
        if large:
            elements = self.read_uint32()
            size = self.read_uint32()
        else:
            elements = self.read_uint16()
            size = self.read_uint16()

        if size > length:
            raise ValueError('Json length is larger than packet length')

        if large:
            key_offset_lengths = [(
                self.read_uint32(),  # offset (we don't actually need that)
                self.read_uint16()   # size of the key
            ) for _ in range(elements)]
        else:
            key_offset_lengths = [(
                self.read_uint16(),  # offset (we don't actually need that)
                self.read_uint16()   # size of key
            ) for _ in range(elements)]

        value_type_inlined_lengths = [self.read_offset_or_inline(large)
                                      for _ in range(elements)]

        keys = [self.__read_decode(x[1]) for x in key_offset_lengths]

        out = {}
        for i in range(elements):
            if value_type_inlined_lengths[i][1] is None:
                data = value_type_inlined_lengths[i][2]
            else:
                t = value_type_inlined_lengths[i][0]
                data = self.read_binary_json_type(t, length)
            out[keys[i]] = data

        return out

    def read_binary_json_array(self, length, large):
        if large:
            elements = self.read_uint32()
            size = self.read_uint32()
        else:
            elements = self.read_uint16()
            size = self.read_uint16()

        if size > length:
            raise ValueError('Json length is larger than packet length')

        values_type_offset_inline = [self.read_offset_or_inline(large) for _ in range(elements)]

        def _read(x):
            if x[1] is None:
                return x[2]
            return self.read_binary_json_type(x[0], length)

        return [_read(x) for x in values_type_offset_inline]

    def read_offset_or_inline(self, large):
        t = self.read_uint8()

        if t in (json_type.JSONB_TYPE_LITERAL,
                 json_type.JSONB_TYPE_INT16, json_type.JSONB_TYPE_UINT16):
            return (t, None, self.read_binary_json_type_inlined(t))
        if large and t in (json_type.JSONB_TYPE_INT32, json_type.JSONB_TYPE_UINT32):
            return (t, None, self.read_binary_json_type_inlined(t))

        if large:
            return (t, self.read_uint32(), None)
        return (t, self.read_uint16(), None)

    def read_length_coded_pascal_string(self, size):
        """Read a string with length coded using pascal style.
        The string starts with the size of the string.
        """
        length = self.read_uint_by_size(size)
        return self.__read_decode(length)
    '''###################################################'''


    def read_header(self):
        '''binlog_event_header_len = 19
        timestamp     : 4bytes
        type_code     : 1byte
        server_id     : 4bytes
        event_length  : 4bytes
        next_position : 4bytes
        flags         : 2bytes
        '''
        read_byte = self.read_bytes(19)
        if read_byte:
            result = struct.unpack('=IBIIIH', read_byte)
            type_code, event_length, timestamp = result[1], result[3], result[0]
            return type_code, event_length, time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timestamp))
        else:
            return None, None, None

    def read_query_event(self, event_length=None):
        '''fix_part = 13:
        thread_id             : 4bytes
        execute_seconds       : 4bytes
        database_length       : 1byte
        error_code            : 2bytes
        variable_block_length : 2bytes
        variable_part :
            variable_block_length = fix_part.variable_block_length
            database_name = fix_part.database_length
            sql_statement = event_header.event_length - 19 - 13 - variable_block_length - database_length - 4
        '''
        read_byte = self.read_bytes(binlog_event_fix_part)
        fix_result = struct.unpack('=IIBHH', read_byte)
        thread_id = fix_result[0]
        self.read_bytes(fix_result[4])
        read_byte = self.read_bytes(fix_result[2])
        database_name, = struct.unpack('{}s'.format(fix_result[2]), read_byte)
        statement_length = event_length - binlog_event_fix_part - binlog_event_header_len \
            - fix_result[4] - fix_result[2] - binlog_quer_event_stern
        read_byte = self.read_bytes(statement_length)
        _a, sql_statement, = struct.unpack('1s{}s'.format(statement_length - 1), read_byte)
        return thread_id, database_name, sql_statement
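    # Illustrative note (not in the original post): every event starts with the 19-byte
    # header unpacked above with '=IBIIIH' into (timestamp, type_code, server_id,
    # event_length, next_position, flags); e.g. type_code 2 is a QUERY_EVENT and 33 a
    # GTID_LOG_EVENT (see binlog_events).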

    def read_table_map_event(self, event_length):
        '''
        fix_part = 8
            table_id : 6bytes
            reserved : 2bytes
        variable_part:
            database_name_length : 1byte
            database_name        : database_name_length bytes + 1
            table_name_length    : 1byte
            table_name           : table_name_length bytes + 1
            columns_count        : 1byte
            columns_type_array   : one byte per column
            metadata_length      : 1byte
            metadata             : only present for the variable length types
                                   (varchar: 2bytes; blob/text/json: 1byte;
                                    time2/timestamp2/datetime2: 1byte; float/double: 1byte;
                                    newdecimal: 2bytes (precision, decimals);
                                    char/enum/binary/set: 2bytes (real type id: 1byte, metadata: 1byte))
            bit_field            : 1byte
            crc                  : 4bytes
            .........
        :param event_length:
        :return:
        '''
        self.read_bytes(table_map_event_fix_length)
        database_name_length, = struct.unpack('B', self.read_bytes(1))
        database_name, _a, = struct.unpack('{}ss'.format(database_name_length), self.read_bytes(database_name_length + 1))
        table_name_length, = struct.unpack('B', self.read_bytes(1))
        table_name, _a, = struct.unpack('{}ss'.format(table_name_length), self.read_bytes(table_name_length + 1))
        colums = self.read_uint8()
        a = '='
        for i in range(colums):
            a += 'B'
        colums_type_id_list = list(struct.unpack(a, self.read_bytes(colums)))
        self.read_bytes(1)
        metadata_dict = {}
        bytes = 1
        for idex in range(len(colums_type_id_list)):
            if colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_VAR_STRING, column_type_dict.MYSQL_TYPE_VARCHAR]:
                metadata = self.read_uint16()
                metadata_dict[idex] = 2 if metadata > 255 else 1
                bytes += 2
            elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_BLOB, column_type_dict.MYSQL_TYPE_MEDIUM_BLOB, column_type_dict.MYSQL_TYPE_LONG_BLOB,
                                               column_type_dict.MYSQL_TYPE_TINY_BLOB, column_type_dict.MYSQL_TYPE_JSON]:
                metadata = self.read_uint8()
                metadata_dict[idex] = metadata
                bytes += 1
            elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_TIMESTAMP2, column_type_dict.MYSQL_TYPE_DATETIME2, column_type_dict.MYSQL_TYPE_TIME2]:
                metadata = self.read_uint8()
                metadata_dict[idex] = metadata
                bytes += 1
            elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_NEWDECIMAL:
                precision = self.read_uint8()
                decimals = self.read_uint8()
                metadata_dict[idex] = [precision, decimals]
                bytes += 2
            elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_FLOAT, column_type_dict.MYSQL_TYPE_DOUBLE]:
                metadata = self.read_uint8()
                metadata_dict[idex] = metadata
                bytes += 1
            elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_STRING]:
                _type, metadata, = struct.unpack('=BB', self.read_bytes(2))
                colums_type_id_list[idex] = _type
                metadata_dict[idex] = metadata
                bytes += 2

        if self.__packet is None:
            self.file_data.seek(event_length - binlog_event_header_len - table_map_event_fix_length - 5 - database_name_length
                                - table_name_length - colums - bytes, 1)
        return database_name, table_name, colums_type_id_list, metadata_dict


    def read_gtid_event(self, event_length=None):

        self.read_bytes(1)
        uuid = self.read_bytes(16)
        gtid = "%s%s%s%s-%s%s-%s%s-%s%s-%s%s%s%s%s%s" % \
            tuple("{0:02x}".format(ord(c)) for c in uuid)
        gno_id = self.read_uint64()
        gtid += ":{}".format(gno_id)
        if self.__packet is None:
            self.file_data.seek(event_length - 1 - 16 - 8 - binlog_event_header_len, 1)
        return gtid

    def read_xid_variable(self):
        xid_num = self.read_uint64()
        return xid_num

    def __read_decode(self, count):
        _value = self.read_bytes(count)
        return struct.unpack('{}s'.format(count), _value)[0]


    def read_row_event(self, event_length, colums_type_id_list, metadata_dict, type, packet=None):
        '''
        fixed_part: 10bytes
            table_id : 6bytes
            reserved : 2bytes
            extra    : 2bytes
        variable_part:
            columns        : 1byte
            variable_sized : int((n+7)/8)  n = columns.value
            variable_sized : int((n+7)/8)  (for update_row_event only)

            variable_sized : int((n+7)/8)
            row_value      : variable size

            crc : 4bytes

        The length prefix of a varchar value takes 2 bytes when the column is defined
        longer than 255 bytes, otherwise 1 byte.
        '''
        self.read_bytes(fix_length + binlog_row_event_extra_headers)
        columns = self.read_uint8()
        columns_length = (columns + 7) / 8
        self.read_bytes(columns_length)
        if type == binlog_events.UPDATE_ROWS_EVENT:
            self.read_bytes(columns_length)
            bytes = binlog_event_header_len + fix_length + binlog_row_event_extra_headers + 1 + columns_length + columns_length
        else:
            bytes = binlog_event_header_len + fix_length + binlog_row_event_extra_headers + 1 + columns_length
        __values = []
        while event_length - bytes > binlog_quer_event_stern:
            values = []
            null_bit = self.read_bytes(columns_length)
            bytes += columns_length
            for idex in range(len(colums_type_id_list)):
                if self.__is_null(null_bit, idex):
                    values.append('Null')
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_TINY:
                    try:
                        values.append(self.read_uint8())
                    except:
                        values.append(self.read_int8())
                    bytes += 1
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_SHORT:
                    try:
                        values.append(self.read_uint16())
                    except:
                        values.append(self.read_int16())
                    bytes += 2
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_INT24:
                    try:
                        values.append(self.read_uint24())
                    except:
                        values.append(self.read_int24())
                    bytes += 3
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_LONG:
                    try:
                        values.append(self.read_uint32())
                    except:
                        values.append(self.read_int32())
                    bytes += 4
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_LONGLONG:
                    try:
                        values.append(self.read_uint64())
                    except:
                        values.append(self.read_int64())
                    bytes += 8

                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_NEWDECIMAL:
                    _list = metadata_dict[idex]
                    decimals, read_bytes = self.__read_new_decimal(precision=_list[0], decimals=_list[1])
                    values.append(decimals)
                    bytes += read_bytes

                elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_DOUBLE, column_type_dict.MYSQL_TYPE_FLOAT]:
                    _read_bytes = metadata_dict[idex]
                    if _read_bytes == 8:
                        _values, = struct.unpack('<d', self.read_bytes(8))
                    else:
                        _values, = struct.unpack('<f', self.read_bytes(4))
                    values.append(_values)
                    bytes += _read_bytes

                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_TIMESTAMP2:
                    _time, read_bytes = self.__add_fsp_to_time(datetime.datetime.fromtimestamp(self.read_int_be_by_size(4)), metadata_dict[idex])
                    values.append(str(_time))
                    bytes += read_bytes + 4
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_DATETIME2:
                    _time, read_bytes = self.__read_datetime2(metadata_dict[idex])
                    values.append(str(_time))
                    bytes += 5 + read_bytes
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_YEAR:
                    _date = self.read_uint8() + 1900
                    values.append(_date)
                    bytes += 1
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_DATE:
                    _time = self.__read_date()
                    values.append(str(_time))
                    bytes += 3

                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_TIME2:
                    _time, read_bytes = self.__read_time2(metadata_dict[idex])
                    bytes += read_bytes
                    values.append(str(_time))

                elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_VARCHAR, column_type_dict.MYSQL_TYPE_VAR_STRING, column_type_dict.MYSQL_TYPE_BLOB,
                                                   column_type_dict.MYSQL_TYPE_TINY_BLOB, column_type_dict.MYSQL_TYPE_LONG_BLOB, column_type_dict.MYSQL_TYPE_MEDIUM_BLOB]:
                    _metadata = metadata_dict[idex]
                    value_length = self.read_uint_by_size(_metadata)
                    '''
                    if _metadata == 1:
                        value_length = self.read_uint8()
                    elif _metadata == 2:
                        value_length = self.read_uint16()
                    elif _metadata == 3:
                        value_length = self.read_uint24()
                    elif _metadata == 4:
                        value_length = self.read_uint32()
                    '''
                    values.append(str(self.__read_decode(value_length)))
                    bytes += value_length + _metadata
                elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_JSON]:
                    _metadata = metadata_dict[idex]
                    value_length = self.read_uint_by_size(_metadata)
                    values.append(str(self.read_binary_json(value_length)))
                    bytes += value_length + _metadata
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_STRING:
                    _metadata = metadata_dict[idex]
                    if _metadata <= 255:
                        value_length = self.read_uint8()
                        values.append(str(self.__read_decode(value_length)))
                        _read = 1
                    else:
                        value_length = self.read_uint16()
                        values.append(str(self.__read_decode(value_length)))
                        _read = 2
                    bytes += value_length + _read
                elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_ENUM:
                    _metadata = metadata_dict[idex]
                    if _metadata == 1:
                        values.append('@' + str(self.read_uint8()))
                    elif _metadata == 2:
                        values.append('@' + str(self.read_uint16()))
                    bytes += _metadata

            if type == binlog_events.UPDATE_ROWS_EVENT:
                __values.append(values)
            else:
                super(Read, self).TractionVlues(after_value=values, type=type)
        if self.__packet is None:
            self.file_data.seek(event_length - bytes, 1)
        return __values
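    # Illustrative note (not in the original post): with 10 columns the bitmaps read above
    # are (10 + 7) / 8 = 2 bytes long, and __is_null() checks bit (i % 8) of byte i / 8 to
    # decide whether column i is NULL in the current row image.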


    def write_row_event(self, event_length, colums_type_id_list, metadata_dict, type):
        self.read_row_event(event_length, colums_type_id_list, metadata_dict, type)

    def delete_row_event(self, event_length, colums_type_id_list, metadata_dict, type):
        self.read_row_event(event_length, colums_type_id_list, metadata_dict, type)

    def update_row_event(self, event_length, colums_type_id_list, metadata_dict, type):
        values = self.read_row_event(event_length, colums_type_id_list, metadata_dict, type)
        __values = [values[i:i + 2] for i in xrange(0, len(values), 2)]
        for i in range(len(__values)):
            super(Read, self).TractionVlues(before_value=__values[i][0], after_value=__values[i][1], type=type)


class CheckEvent(Echo):
    def __init__(self, filename=None, start_position=None, stop_position=None,
                 start_datetime=None, stop_datetime=None, _thread_id=None, gtid=None, rollback=None,
                 user=None, host=None, passwd=None, port=None):
        self.cloums_type_id_list = None
        self.metadata_dict = None
        self._gtid = None
        self._thread_id_status = None
        self._func = None

        self.start_position = start_position
        self.stop_position = stop_position
        self.start_datetime = start_datetime
        self.stop_datetime = stop_datetime
        self._thread_id = _thread_id
        self.gtid = gtid
        if rollback:
            if user is None or host is None or passwd is None:
                Usage()
                sys.exit()
            _rollback.rollback_status = True
            _rollback._myfunc = GetRollStatement(host=host, user=user, passwd=passwd, port=(port if port else 3306))
            if os.path.exists('tmp_rollback'):
                os.remove('tmp_rollback')
            _rollback._myfile = open('tmp_rollback', 'a+')

        self._pos = start_position if start_position else 4
        if filename is None:
            print 'NO SUCH FILE'
            Usage()
            sys.exit()

        self.readbinlog = Read(start_position=start_position, filename=filename)
        self.__read()

    def __gtid_event_filter(self, type, type_code=None, event_length=None, execute_time=None):
        if type_code is None and event_length is None and execute_time is None:
            type_code, event_length, execute_time = self.readbinlog.read_header()
        while True:
            if type == 'pos' and self._pos > self.stop_position and self.stop_position \
                    and type_code == binlog_events.GTID_LOG_EVENT:
                break
            elif type == 'datetime' and self.stop_datetime and execute_time > self.stop_datetime:
                break
            if type_code is None:
                break
            if self._gtid is None:
                if type_code == binlog_events.GTID_LOG_EVENT:
                    gtid = self.readbinlog.read_gtid_event(event_length)
                    if gtid == self.gtid:
                        self._gtid = gtid
                        self.Gtid(execute_time, gtid, self._pos)
                else:
                    self.readbinlog.file_data.seek(event_length - binlog_event_header_len, 1)
            else:
                if type_code == binlog_events.GTID_LOG_EVENT:
                    break
                self.__read_event(type_code, event_length, execute_time)
            self._pos += event_length
            type_code, event_length, execute_time = self.readbinlog.read_header()

    def __thread_id_filed(self, type, type_code=None, event_length=None, execute_time=None):
        if type_code is None and event_length is None and execute_time is None:
            type_code, event_length, execute_time = self.readbinlog.read_header()
        while True:
            if type == 'pos' and self._pos > self.stop_position and self.stop_position \
                    and type_code == binlog_events.GTID_LOG_EVENT:
                break
            elif type == 'datetime' and self.stop_datetime and execute_time > self.stop_datetime:
                break
            if type_code is None:
                break

            if type_code == binlog_events.QUERY_EVENT and self._thread_id_status is None:
                thread_id, database_name, sql_statement = self.readbinlog.read_query_event(event_length)
                if thread_id == self._thread_id:
                    if self._gtid:
                        self.Gtid(execute_time, self._gtid, self._pos)
                    self._thread_id_status = True
                    self.TractionHeader(thread_id, database_name, sql_statement, execute_time, self._pos)
                self.readbinlog.file_data.seek(4, 1)
            elif self._thread_id_status and type_code != binlog_events.QUERY_EVENT and type_code != binlog_events.GTID_LOG_EVENT:
                self.__read_event(type_code, event_length, execute_time)
            elif type_code == binlog_events.QUERY_EVENT and self._thread_id_status:
                thread_id, database_name, sql_statement = self.readbinlog.read_query_event(event_length)
                if thread_id != self._thread_id:
                    self._thread_id_status = None
                else:
                    self.TractionHeader(thread_id, database_name, sql_statement, execute_time, self._pos)
                self.readbinlog.file_data.seek(4, 1)
            elif type_code == binlog_events.GTID_LOG_EVENT and self._thread_id_status is None:
                self._gtid = self.readbinlog.read_gtid_event(event_length)
            else:
                self.readbinlog.file_data.seek(event_length - binlog_event_header_len, 1)
            self._pos += event_length
            type_code, event_length, execute_time = self.readbinlog.read_header()

    def __read_event(self, type_code, event_length, execute_time):
        if type_code == binlog_events.FORMAT_DESCRIPTION_EVENT:
            binlog_ver, server_ver, create_time = self.readbinlog.read_format_desc_event()
            self.Version(binlog_ver, server_ver, create_time)
            self.readbinlog.file_data.seek(event_length - binlog_event_header_len - read_format_desc_event_length, 1)
        elif type_code == binlog_events.QUERY_EVENT:
            thread_id, database_name, sql_statement = self.readbinlog.read_query_event(event_length)
            self.TractionHeader(thread_id, database_name, sql_statement, execute_time, self._pos)
            self.readbinlog.file_data.seek(4, 1)
        elif type_code == binlog_events.XID_EVENT:
            xid_num = self.readbinlog.read_xid_variable()
            self.Xid(execute_time, xid_num, self._pos)
            self.readbinlog.file_data.seek(4, 1)
        elif type_code == binlog_events.TABLE_MAP_EVENT:
            database_name, table_name, self.cloums_type_id_list, self.metadata_dict = self.readbinlog.read_table_map_event(
                event_length)
            self.Tablemap(execute_time, table_name)
        elif type_code == binlog_events.GTID_LOG_EVENT:
            gtid = self.readbinlog.read_gtid_event(event_length)
            self.Gtid(execute_time, gtid, self._pos)
        elif type_code == binlog_events.WRITE_ROWS_EVENT:
            self.readbinlog.write_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, type_code)
        elif type_code == binlog_events.DELETE_ROWS_EVENT:
            self.readbinlog.delete_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, type_code)
        elif type_code == binlog_events.UPDATE_ROWS_EVENT:
            self.readbinlog.update_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, type_code)
        else:
            self.readbinlog.file_data.seek(event_length - binlog_event_header_len, 1)

    def __read_binlog(self, type, type_code=None, event_length=None, execute_time=None):
        if type_code is None and event_length is None and execute_time is None:
            type_code, event_length, execute_time = self.readbinlog.read_header()
        while True:
            if type == 'pos' and self._pos > self.stop_position and self.stop_position and \
                    type_code == binlog_events.GTID_LOG_EVENT:
                break
            elif type == 'datetime' and self.stop_datetime and execute_time > self.stop_datetime:
                break
            if type_code is None:
                break
            self.__read_event(type_code=type_code, event_length=event_length, execute_time=execute_time)
            self._pos += event_length
            type_code, event_length, execute_time = self.readbinlog.read_header()

    def __read(self):
        if self.start_position:
            if self.gtid:
                self.__gtid_event_filter('pos')
            elif self._thread_id:
                self.__thread_id_filed('pos')
            else:
                self.__read_binlog('pos')
        elif self.start_datetime:
            while True:
                type_code, event_length, execute_time = self.readbinlog.read_header()
                if execute_time >= self.start_datetime:
                    break
                self.readbinlog.file_data.seek(event_length - binlog_event_header_len, 1)
                self._pos += event_length
            if self.gtid:
                self.__gtid_event_filter('datetime', type_code, event_length, execute_time)
            elif self._thread_id:
                self.__thread_id_filed('datetime', type_code, event_length, execute_time)
            else:
                self.__read_binlog('datetime', type_code, event_length, execute_time)
        else:
            if self.gtid:
                self.__gtid_event_filter('pos')
            elif self._thread_id:
                self.__thread_id_filed('pos')
            else:
                self.__read_binlog('pos')

        self.readbinlog.file_data.close()
        if _rollback.rollback_status:
            _rollback._myfunc._close()
            ps = PrintSql()
            ps.read()


class ReplicationMysql(Echo):
    def __init__(self, block=None, server_id=None, log_file=None,
                 log_pos=None, host=None, user=None, passwd=None, rollback=None,
                 port=None, gtid=None, _thread_id=None, stop_pos=None):
        import pymysql
        _remote_filed._gtid = gtid
        _remote_filed._thread_id = _thread_id

        self._stop_pos = stop_pos
        self._log_file = log_file
        self._log_pos = log_pos
        self.block = block if block != None else False
        self.server_id = server_id if server_id != None else 133
        self.port = port if port != None else 3306
        self.connection = pymysql.connect(host=host,
                                          user=user,
                                          password=passwd, port=self.port,
                                          db='',
                                          charset='utf8mb4',
                                          cursorclass=pymysql.cursors.DictCursor)
        if rollback:
            _remote_filed._rollback_status = True
            _rollback._myfunc = GetRollStatement(host=host, user=user, passwd=passwd, port=self.port)

        self.ReadPack()

    def __checksum_enabled(self):
        """Return True if binlog-checksum = CRC32. Only for MySQL > 5.6"""
        with self.connection.cursor() as cur:
            sql = 'SHOW GLOBAL VARIABLES LIKE "BINLOG_CHECKSUM";'
            cur.execute(sql)
            result = cur.fetchone()

        if result is None:
            return False
        if 'Value' in result and result['Value'] is None:
            return False
        return True

    def __set_checksum(self):
        with self.connection.cursor() as cur:
            cur.execute("set @master_binlog_checksum= @@global.binlog_checksum;")

    def GetFile(self):
        with self.connection.cursor() as cur:
            sql = "show master status;"
            cur.execute(sql)
            result = cur.fetchone()
        return result['File'], result['Position']


    def PackeByte(self):

        COM_BINLOG_DUMP = 0x12

        if self._log_file is None:
            if self._log_pos is None:
                self._log_file, self._log_pos = self.GetFile()
            else:
                self._log_file, _ = self.GetFile()
        elif self._log_file and self._log_pos is None:
            self._log_pos = 4

        # COM_BINLOG_DUMP request: payload length + command byte, then binlog position (4),
        # flags (2), server id (4) and the binlog file name
        prelude = struct.pack('<i', len(self._log_file) + 11)
        prelude += struct.pack('<B', COM_BINLOG_DUMP)
        prelude += struct.pack('<I', self._log_pos)
        if self.block:
            prelude += struct.pack('<H', 0)
        else:
            prelude += struct.pack('<H', 0x01)   # BINLOG_DUMP_NON_BLOCK
        prelude += struct.pack('<I', self.server_id)
        prelude += self._log_file.encode()
        return prelude


    def UnPack(self, pack):
        # header: 1 byte OK flag followed by the 19 byte binlog event header
        next_log_pos = None
        unpack = struct.unpack('<cIcIIIH', pack.read(20))
        timestamp = unpack[1]
        event_type = ord(unpack[2])
        event_length = unpack[4]
        next_log_pos = unpack[5]
        execute_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timestamp))

        _Read = Read(pack=pack)
        if event_type == binlog_events.QUERY_EVENT:
            thread_id, database_name, sql_statement = _Read.read_query_event(event_length)
            self.TractionHeader(thread_id, database_name, sql_statement, execute_time, self._log_pos)
        elif event_type == binlog_events.GTID_LOG_EVENT:
            gtid = _Read.read_gtid_event()
            self.Gtid(execute_time, gtid, self._log_pos)

        elif event_type == binlog_events.XID_EVENT:
            xid = _Read.read_xid_variable()
            self.Xid(execute_time, xid, self._log_pos)
        elif event_type == binlog_events.TABLE_MAP_EVENT:
            database_name, table_name, self.cloums_type_id_list, self.metadata_dict = _Read.read_table_map_event(event_length)
            self.Tablemap(execute_time, table_name)
        elif event_type == binlog_events.WRITE_ROWS_EVENT:
            _Read.write_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, event_type)
        elif event_type == binlog_events.DELETE_ROWS_EVENT:
            _Read.delete_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, event_type)
        elif event_type == binlog_events.UPDATE_ROWS_EVENT:
            _Read.update_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, event_type)
        if next_log_pos:
            self._log_pos = next_log_pos


    def ReadPack(self):
        _packet = self.PackeByte()
        if self.__checksum_enabled():
            self.__set_checksum()
        import pymysql
        if pymysql.__version__ < "0.6":
            self.connection.wfile.write(_packet)
            self.connection.wfile.flush()
        else:
            self.connection._write_bytes(_packet)
            self.connection._next_seq_id = 1

        while True:
            try:
                if pymysql.__version__ < "0.6":
                    pkt = self.connection.read_packet()
                else:
                    pkt = self.connection._read_packet()

                self.UnPack(pkt)
            except:
                self.connection.close()
                break
            if self._stop_pos and self._log_pos > self._stop_pos:
                break



def Usage():
    __usage__ = """
    Usage:
    Options:
        -h [--help]       : print help message
        -f [--file]       : the binlog file path
        --start-position  : Start reading the binlog at position N. Applies to the
                            first binlog passed on the command line.
        --stop-position   : Stop reading the binlog at position N. Applies to the
                            last binlog passed on the command line.
        --start-datetime  : Start reading the binlog at the first event having a datetime
                            equal or posterior to the argument; the argument must be
                            a date and time in the local time zone, in any format
                            accepted by the MySQL server for DATETIME and TIMESTAMP
                            types, for example: 2004-12-25 11:25:56 (you should
                            probably use quotes for your shell to set it properly)
        --stop-datetime   : Stop reading the binlog at the first event having a datetime
                            equal or posterior to the argument; the argument must be
                            a date and time in the local time zone, in any format
                            accepted by the MySQL server for DATETIME and TIMESTAMP
                            types, for example: 2004-12-25 11:25:56 (you should
                            probably use quotes for your shell to set it properly).
        -t [--thread-id]  : filter by the executing thread id

        -g [--gtid]       : print the transaction content of a given GTID

        -r [--rollback]   : generate the rollback statements
        -u [--user]       : User for login if not current user
        -p [--passwd]     : Password to use when connecting to server
        -H [--host]       : Connect to host, default localhost
        -P [--port]       : Port number to use for connection, default 3306
        --remote          : act as a replication slave and read events from the server
        --log-file        : Set replication log file, default master current log file
        --start-position  : Start replication at log position, default current position
        --stop-position   : Stop replication at log position
        --block           : Read on stream is blocking
        --server-id       : default 133
    """
    print __usage__


def main(argv):
    _argv = {}
    try:
        opts, args = getopt.getopt(argv[1:], 'hrf:t:g:H:p:P:u:',
                                   ['help', 'file=', 'start-position=', 'stop-position=', 'start-datetime=',
                                    'stop-datetime=', 'host=', 'user=', 'passwd=', 'port=', 'thread-id=', 'gtid=',
                                    'rollback', 'remote', 'log-file=', 'block', 'server-id='])
    except getopt.GetoptError, err:
        print str(err)
        Usage()
        sys.exit(2)
    for o, a in opts:
        if o in ('-h', '--help'):
            Usage()
            sys.exit(1)
        elif o in ('-f', '--file'):
            _argv['file'] = a
        elif o in ('--start-position',):
            _argv['start-position'] = int(a)
        elif o in ('--stop-position',):
            _argv['stop-position'] = int(a)
        elif o in ('--start-datetime',):
            _argv['start-datetime'] = a
        elif o in ('--stop-datetime',):
            _argv['stop-datetime'] = a
        elif o in ('-t', '--thread-id'):
            _argv['thread-id'] = int(a)
        elif o in ('-g', '--gtid'):
            _argv['gtid'] = a
        elif o in ('-r', '--rollback'):
            _argv['rollback'] = True
        elif o in ('-u', '--user'):
            _argv['user'] = a
        elif o in ('-H', '--host'):
            _argv['host'] = a
        elif o in ('-p', '--passwd'):
            _argv['passwd'] = a
        elif o in ('-P', '--port'):
            _argv['port'] = int(a)
        elif o in ('--remote',):
            _argv['remote'] = True
        elif o in ('--log-file',):
            _argv['log-file'] = a
        elif o in ('--block',):
            _argv['block'] = True
        elif o in ('--server-id',):
            _argv['server-id'] = int(a)
        else:
            print 'unhandled option'
            sys.exit(3)

    if 'rollback' in _argv:
        if 'remote' in _argv:
            ReplicationMysql(user=_argv['user'], port=(_argv['port'] if 'port' in _argv else None),
                             passwd=_argv['passwd'], host=(_argv['host'] if 'host' in _argv else 'localhost'),
                             log_file=(_argv['log-file'] if 'log-file' in _argv else None),
                             log_pos=(_argv['start-position'] if 'start-position' in _argv else None),
                             stop_pos=(_argv['stop-position'] if 'stop-position' in _argv else None),
                             server_id=(_argv['server-id'] if 'server-id' in _argv else None),
                             block=(_argv['block'] if 'block' in _argv else None),
                             gtid=(_argv['gtid'] if 'gtid' in _argv else None),
                             _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None),
                             rollback=(_argv['rollback'] if 'rollback' in _argv else None))
        elif 'start-position' in _argv:
            CheckEvent(filename=_argv['file'], gtid=(_argv['gtid'] if 'gtid' in _argv else None),
                       start_position=(_argv['start-position'] if 'start-position' in _argv else None),
                       stop_position=(_argv['stop-position'] if 'stop-position' in _argv else None),
                       rollback=_argv['rollback'], user=_argv['user'],
                       host=(_argv['host'] if 'host' in _argv else 'localhost'),
                       passwd=_argv['passwd'],
                       port=(_argv['port'] if 'port' in _argv else None),
                       _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
        elif 'gtid' in _argv:
            CheckEvent(filename=_argv['file'],
                       gtid=(_argv['gtid'] if 'gtid' in _argv else None), rollback=_argv['rollback'],
                       user=_argv['user'],
                       host=(_argv['host'] if 'host' in _argv else 'localhost'),
                       passwd=_argv['passwd'],
                       port=(_argv['port'] if 'port' in _argv else None))
        else:
            CheckEvent(filename=_argv['file'], rollback=_argv['rollback'], user=_argv['user'],
                       host=(_argv['host'] if 'host' in _argv else 'localhost'),
                       passwd=_argv['passwd'],
                       port=(_argv['port'] if 'port' in _argv else None),
                       _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
    elif 'remote' in _argv:
        ReplicationMysql(user=_argv['user'], port=(_argv['port'] if 'port' in _argv else None),
                         passwd=_argv['passwd'], host=(_argv['host'] if 'host' in _argv else 'localhost'),
                         log_file=(_argv['log-file'] if 'log-file' in _argv else None),
                         log_pos=(_argv['start-position'] if 'start-position' in _argv else None),
                         stop_pos=(_argv['stop-position'] if 'stop-position' in _argv else None),
                         server_id=(_argv['server-id'] if 'server-id' in _argv else None),
                         block=(_argv['block'] if 'block' in _argv else None),
                         gtid=(_argv['gtid'] if 'gtid' in _argv else None),
                         _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None),
                         rollback=(_argv['rollback'] if 'rollback' in _argv else None))

    elif 'start-position' in _argv:
        CheckEvent(start_position=(_argv['start-position'] if _argv['start-position'] else None),
                   filename=_argv['file'], gtid=(_argv['gtid'] if 'gtid' in _argv else None),
                   stop_position=(_argv['stop-position'] if 'stop-position' in _argv else None),
                   _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
    elif 'start-datetime' in _argv:
        CheckEvent(start_datetime=(_argv['start-datetime'] if 'start-datetime' in _argv else None),
                   filename=_argv['file'], gtid=(_argv['gtid'] if 'gtid' in _argv else None),
                   stop_datetime=(_argv['stop-datetime'] if 'stop-datetime' in _argv else None),
                   _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
    elif 'gtid' in _argv:
        CheckEvent(filename=_argv['file'],
                   gtid=(_argv['gtid'] if 'gtid' in _argv else None),
                   _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
    else:
        CheckEvent(filename=_argv['file'],
                   _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))


if __name__ == "__main__":
    main(sys.argv)
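A few illustrative invocations, derived only from the Usage() help text above (the script name
pybinlog.py, the binlog file names, positions and credentials below are placeholders, not values
from the original post):

python pybinlog.py -f mysql-bin.000003 --start-position 4 --stop-position 4096
python pybinlog.py -f mysql-bin.000003 --start-datetime '2019-01-01 00:00:00' --stop-datetime '2019-01-02 00:00:00'
python pybinlog.py -f mysql-bin.000003 -g 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx:1234'
python pybinlog.py -f mysql-bin.000003 --start-position 194 -r -u root -p xxxx -H 127.0.0.1 -P 3306
python pybinlog.py --remote -u repl -p xxxx -H 127.0.0.1 -P 3306 --server-id 134 --log-file mysql-bin.000003 --block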
