上次记录的导表脚本有一个问题:当表比较大、数据量超过电脑内存时,程序就会出错。
这次改为分批存入、后续追加的方式。
但验证后仍有问题,没有达到预期目的,先记录一下。
#!/usr/bin/python
#coding=UTF-8
import xlwt
import MySQLdb
import conf
import sys
import xlrd
import datetime
from xlutils.copy import copy
# Python 2-only hack: reload() re-exposes sys.setdefaultencoding (removed
# by site.py at startup) so implicit str<->unicode conversions use UTF-8
# instead of ASCII. Not needed (and not available) on Python 3.
reload(sys)
sys.setdefaultencoding('utf8')
def get_data(sql):
    """Run *sql* against the MySQL database configured in ``conf`` and
    return every result row.

    :param sql: SQL query string to execute.
    :return: tuple of rows, as produced by ``cursor.fetchall()``.
    """
    conn = MySQLdb.connect(conf.sh_dbhost, conf.sh_user, conf.sh_passd,
                           conf.sh_dbname, charset="utf8")
    try:
        cur = conn.cursor()
        try:
            cur.execute(sql)
            # NOTE(review): fetchall() still loads the whole result set into
            # memory, which is exactly the large-table problem described in
            # the file header; switch to fetchmany()/SSCursor for true
            # batched reads.
            return cur.fetchall()
        finally:
            # try/finally guarantees cursor and connection are released even
            # when execute() raises (the original leaked both on error).
            cur.close()
    finally:
        conn.close()
def write_fild_to_excel(filename, sql):
    """Create a new .xls file whose first row contains the column names.

    *sql* is expected to return one column name per row (e.g. a query
    against ``information_schema.columns``); each name is written into
    row 0 of a sheet named 'sheetname', then the workbook is saved to
    *filename*.
    """
    workbook = xlwt.Workbook()
    sheet = workbook.add_sheet('sheetname', cell_overwrite_ok=True)
    fields = get_data(sql)
    # enumerate() instead of range(len(...)): same cells written, clearer
    # intent. Each row is a 1-tuple, hence field[0].
    for col, field in enumerate(fields):
        sheet.write(0, col, u'%s' % field[0])
    workbook.save(filename)
def write_data_to_excel(filename, sql):
    """Append the rows returned by *sql* to an existing .xls file.

    The file is re-read with xlrd to find how many rows it already has,
    copied with ``xlutils.copy``, the new rows are written immediately
    after the last existing one, and the workbook is saved back to the
    same path.
    """
    results = get_data(sql)
    # BUG FIX: the second positional argument of xlrd.open_workbook() is
    # `logfile` and expects a file-like object; the original passed the
    # string 'f' by mistake, which breaks as soon as xlrd tries to log
    # a warning. Use the default (sys.stdout) instead.
    workbook = xlrd.open_workbook(filename)
    worksheet = workbook.sheet_by_index(0)
    nrows = worksheet.nrows  # first free 0-based row index for appending
    wb = copy(workbook)
    ws = wb.get_sheet(0)
    # Same cells as the original 1-based index arithmetic
    # (row+nrows-1 == nrows+i), written via enumerate for clarity.
    for i, record in enumerate(results):
        for j, value in enumerate(record):
            ws.write(nrows + i, j, u'%s' % value)
    wb.save(filename)
if __name__ == "__main__":
sql_fild = "SELECT column_name FROM information_schema.columns WHERE Table_name= '%s'" %conf.sh_table
sql_data = "SELECT * FROM tb_product ORDER BY product_id DESC limit 4000"
write_fild_to_excel(r'E:\yu.xls',sql_fild)
write_data_to_excel(r'E:\yu.xls',sql_data)
print "done"