import json
import logging.handlers
import os

import pymysql

from config import ROOT_DIR


def read_json(filename, key):
    """Read ``data/<filename>`` and return ``json[key]`` as a list of tuples.

    Intended for test parametrization, which accepts only ``[[...], ...]``
    or ``[(...), ...]`` — so each dict entry's values are converted to a
    tuple (ordering follows the dict's insertion order).

    :param filename: JSON file name inside the project's ``data`` directory.
    :param key: top-level key in the JSON document whose value is a list
                of dicts.
    :return: list of tuples, one per dict in ``json[key]``.
    """
    file_path = os.path.join(ROOT_DIR, "data", filename)
    with open(file_path, 'r', encoding="UTF-8") as f:
        return [tuple(item.values()) for item in json.load(f).get(key)]


class GetLog:
    """Logging utility: lazily builds and caches a single shared logger."""

    # Cached logger instance. Must live at class level so the cache
    # survives across calls — the original code reset ``cls.log = None``
    # inside get_log() on every invocation, which defeated the cache and
    # attached a fresh handler each call (duplicated log records).
    log = None

    @classmethod
    def get_log(cls):
        """Return the shared root logger, configuring it on first use.

        First call: attaches a size-rotating file handler (5 MB per file,
        5 backups) writing to ``<ROOT_DIR>/log/log.log`` at INFO level.
        Subsequent calls return the cached logger unchanged.

        :return: the configured ``logging.Logger`` instance.
        """
        if cls.log is None:
            # 1. Obtain the root logger and set its level.
            cls.log = logging.getLogger()
            cls.log.setLevel(logging.INFO)
            # 2. Handler: rotate by file size (5 MB, keep 5 backups).
            #    (A TimedRotatingFileHandler with when="midnight" is the
            #    alternative if time-based rotation is preferred.)
            log_file_path = os.path.join(ROOT_DIR, "log", "log.log")
            tf = logging.handlers.RotatingFileHandler(
                log_file_path,
                maxBytes=1024 * 1024 * 5,
                backupCount=5,
                encoding="utf-8",
            )
            # 3. Formatter: timestamp, level, origin, message.
            fm = logging.Formatter(
                "%(asctime)s %(levelname)s [%(filename)s(%(funcName)s:%(lineno)d)] - %(message)s"
            )
            # 4. Wire formatter -> handler -> logger.
            tf.setFormatter(fm)
            cls.log.addHandler(tf)
        return cls.log


def conn_mysql(sql):
    """Open a MySQL connection and execute *sql*.

    :param sql: a single SQL statement.
    :return: for a SELECT, the full result set (tuple of rows from
             ``fetchall()``); for any other statement, the affected row
             count after commit; ``None`` if execution raised (the error
             is logged, not re-raised).
    """
    conn = None
    cursor = None
    try:
        conn = pymysql.connect(host="", user="root", password="",
                               db="ets", port=3306, charset="utf8")
        cursor = conn.cursor()
        cursor.execute(sql)
        # Case-insensitive check on the statement's first word.
        if sql.split()[0].lower() == "select":
            return cursor.fetchall()
        conn.commit()  # commit write statements
        return cursor.rowcount
    except Exception as e:
        GetLog.get_log().error(e)
    finally:
        # Close cursor before connection. The original closed the
        # connection first and also double-closed both resources in the
        # except branch as well as here.
        if cursor:
            cursor.close()
        if conn:
            conn.close()


def clear_data():
    """Smoke-check helper: fetch one row from ``ets_anjywxxb``."""
    sql_1 = "selecT * from ets_anjywxxb limit 0,1"
    return conn_mysql(sql_1)


if __name__ == '__main__':
    pass
# Collection of common Python utility methods (python常用方法合集)
# First published 2023-08-03 17:13:47