提高数据库存储速度
最近在处理上亿条数据,这些数据被分散地存储在600多个文本文件中,需要通过编程将这些文件的内容存入数据库。文本处理当然首选python啦,于是我便写了一个脚本,如下:
# -*- coding: utf-8 -*-
import sqlite3
import re
import os
import datetime
import time
# Module-level connection and cursor shared by every helper below.
conn = sqlite3.connect("test.db")
# Return raw str instead of decoding to unicode (a Python 2 idiom) —
# presumably to tolerate non-UTF-8 bytes in the input files; TODO confirm.
conn.text_factory = str
c= conn.cursor()
def save(filename, contents):
    """Append *contents* to the file *filename*, creating it if needed.

    Fix: the original opened the file and closed it manually, leaking the
    handle if write() raised; a `with` block guarantees the close.
    """
    with open(filename, 'a+') as fh:
        fh.write(contents)
def dirfile(filename):
    """Return the names of the entries inside the directory *filename*."""
    entries = os.listdir(filename)
    return entries
def insertdb(i, emailname, psw):
    """Stage one (id, email, password) row into EmailInfo.

    Committing here is deliberately left out: batching many inserts into
    a single transaction (commit once per file, not per row) is the main
    speed win this script is after.
    """
    c.execute("INSERT INTO EmailInfo VALUES (?,?,?)", (i, emailname, psw))
    # conn.commit()  -- intentionally deferred to the caller
def delete(i):
    """Delete the EmailInfo row whose id equals *i* and commit.

    Fix: sqlite3 requires the parameters argument of execute() to be a
    sequence; the original passed the bare value `i`, which fails for an
    int id — it must be wrapped in a one-element tuple.
    """
    c.execute("delete from EmailInfo where id=?", (i,))
    conn.commit()
def checksql():
    """Print every row currently stored in EmailInfo (debug helper)."""
    c.execute('SELECT * FROM EmailInfo')
    rows = c.fetchall()
    print(rows)
    conn.commit()
def main(i):
array=['0','1','2','3','4','5','6','7','8','9','10','a','b',