# ===== 备份 (Backup utility) =====
import logging
import json
import os
import requests
import datetime
import re
import subprocess
from django.conf import settings
from yz_lib.utils.qi_niu import QiNiu
from yz_lib.utils.others import sha1_file
from django.db import connection
from django.utils import timezone
from django.core.serializers.json import DjangoJSONEncoder
__author__ = 'JayChen'
logger = logging.getLogger('scripts')
class YZBackUp(object):
    """Incremental / full MySQL backup helper.

    Tables are dumped with ``mysqldump`` (or, for incremental runs, selected
    as JSON rows changed since the last full backup), zipped, uploaded to
    Qiniu keyed by file SHA1, and registered with the public upload API.

    increase_models: 增量备份的models, 增量式备份时必填
        (models backed up incrementally by their ``updated_on`` column).
    zip_password: 压缩文件的密码,默认没密码 (zip password; default none).
    """

    def __init__(self, increase_models: list, zip_password=None):
        self.increase_models = increase_models
        # Sanitize the site domain so it is safe inside file names.
        self.project_name = re.sub(r'\W', '_', settings.SITE_DOMAIN)
        self._tm = timezone.now().strftime('%Y%m%d_%H%M%S')
        self._dir = os.path.join(settings.PROJECT_PATH, 'media', 'tmp')
        # Stored as 'P<pwd>' so that 'zip -r{password}' expands to '-rP<pwd>'
        # (-P is zip's password flag); empty string means no password.
        self.zip_password = 'P{}'.format(zip_password) if zip_password else ''

    def _ensure_dir(self):
        """Create the tmp working directory on first use."""
        if not os.path.exists(self._dir):
            logger.info('创建文件夹: {}'.format(self._dir))
            os.makedirs(self._dir)

    def _zip(self, full_zip_file, src_file):
        """Zip ``src_file`` into ``full_zip_file`` (-j drops directory paths).

        NOTE(review): command is built as a shell string; paths derive from
        settings and model table names (trusted), not user input.
        """
        subprocess.check_output(
            'zip -r{password} -j {full_zip_file} {src_file}'.format(
                full_zip_file=full_zip_file,
                src_file=src_file,
                password=self.zip_password),
            shell=True)

    def _mysqldump(self, full_sql_file, table_args):
        """Run mysqldump for the default DB, redirecting into full_sql_file.

        :param table_args: extra CLI arguments — either a list of table
            names or a run of ``--ignore-table=db.table`` flags.
        """
        db_conf = settings.DATABASES['default']
        cmd = 'mysqldump -u{user} -p{password} -h{host} -P{port} --skip-comments {db} {table_args} > {full_sql_file}'.format(
            user=db_conf['USER'],
            password=db_conf['PASSWORD'],
            host=db_conf['HOST'],
            port=db_conf['PORT'] if db_conf['PORT'] else 3306,
            db=db_conf['NAME'],
            table_args=table_args,
            full_sql_file=full_sql_file,
        )
        # NOTE(review): the DB password appears on the command line (visible
        # in the process list) — consider --defaults-extra-file.
        subprocess.check_output(cmd, shell=True)

    def _dump_and_upload(self, stem, table_args):
        """Dump + zip + upload one table set; removes the raw .sql afterwards.

        Returns the upload-API result dict.  Shared by incr_backup() and
        backup_all(), which previously duplicated this whole sequence.
        """
        sql_file = '{project_name}_{stem}.{tm}.sql'.format(
            project_name=self.project_name, stem=stem, tm=self._tm)
        full_sql_file = os.path.join(self._dir, sql_file)
        full_zip_file = full_sql_file + '.zip'
        self._mysqldump(full_sql_file, table_args)
        logger.info('开始打包文件')
        self._zip(full_zip_file, full_sql_file)
        logger.info('{} zip 完成'.format(full_sql_file))
        result = self.upload(full_zip_file)
        if os.path.exists(full_sql_file):
            os.remove(full_sql_file)
        return result

    def upload(self, zip_file: str, remove_after_finish=True):
        """Upload ``zip_file`` to Qiniu (content-addressed by its SHA1) and
        register it with the public upload API.

        :param zip_file: local path of the archive to upload.
        :param remove_after_finish: delete the local file on success.
        :return: the API's JSON result, with 'location' made absolute.
        """
        qn = QiNiu()
        key = sha1_file(zip_file)
        logger.info('准备上传: {}'.format(zip_file))
        logger.info('文件 SHA1: {}'.format(key))
        if qn.get_stat(key):
            # Same SHA1 already stored — skip the transfer entirely.
            logger.info('云端文件已存在')
        else:
            logger.info('云端文件不存在,开始上传')
            ret, info = qn.upload_file(
                zip_file,
                key,
                bucket_name=getattr(settings, 'QINIU_BACKUP_BUCKET', 'media')
            )
            logger.info('上传结果 ret: {}'.format(json.dumps(ret)))
            logger.info('上传结果 info: {}'.format(info.text_body))
            # Move the object to low-frequency (cheaper) storage.
            ret, info = qn.change_type(key, 1)
            logger.info('转低频结果 ret: {}'.format(ret))
        data = {'name': os.path.basename(zip_file)}
        # NOTE(review): verify=False disables TLS certificate checking —
        # confirm this is intentional for this endpoint.
        resp = requests.post(
            'https://{}/api/v1/upload/{}/link/'.format(settings.PUBLIC_DOMAIN, key),
            data=data, verify=False)
        resp.raise_for_status()
        if remove_after_finish:
            os.remove(zip_file)
        result = resp.json()
        logger.info('上传结果: {}'.format(json.dumps(result, indent=4)))
        if 'location' in result:
            result['location'] = 'http://{}{}'.format(settings.PUBLIC_DOMAIN, result['location'])
        return result

    def dict_fetchall(self, _cursor):
        """
        Return all rows from a cursor as a dict
        :param _cursor: django.db.connections.cursor()
        :return: [] list of {column_name: value} dicts
        """
        _columns = [col[0] for col in _cursor.description]
        return [
            dict(zip(_columns, row)) for row in _cursor.fetchall()
        ]

    def backup_json(self, _model, _update_on):
        """Dump rows of ``_model`` changed since ``_update_on`` to a zipped
        JSON file.

        :param _model: Django model whose table has an ``updated_on`` column.
        :param _update_on: datetime cut-off (rows at/after it are exported).
        :return: path of the zip file, or None when nothing changed.
        """
        _update_on = datetime.datetime.strftime(_update_on, '%Y-%m-%d %H:%M:%S')
        name = _model._meta.db_table
        logger.info('开始增量备份: {}'.format(name))
        json_file = '{project_name}_incr_db_{name}.{tm}.json'.format(
            project_name=self.project_name, name=name, tm=self._tm)
        full_json_file = os.path.join(self._dir, json_file)
        full_zip_file = full_json_file + '.zip'
        # Table name comes from the model meta (trusted); the timestamp is
        # passed as a query parameter instead of interpolated into the SQL.
        sql = 'SELECT * FROM {table_name} WHERE updated_on >= %s'.format(table_name=name)
        with connection.cursor() as cursor:  # close the cursor deterministically
            cursor.execute(sql, [_update_on])
            data = self.dict_fetchall(cursor)
        if not data:
            logger.info('No change in: {}'.format(name))
            return None
        with open(full_json_file, 'w') as f:
            f.write(json.dumps(data, cls=DjangoJSONEncoder))
        logger.info('完成增量备份: {}'.format(name))
        logger.info('开始打包文件')
        self._zip(full_zip_file, full_json_file)
        logger.info('{} zip 完成'.format(full_json_file))
        os.remove(full_json_file)
        return full_zip_file

    def get_last_file_time(self, keyword):
        """
        增量备份表, 获取上一个完整备份的时间
        Query the public upload API for the newest file whose name contains
        ``keyword``; return (upload_obj, backup_datetime) or (None, None).
        """
        query_url = 'http://{}/api/v1/uploads/'.format(settings.PUBLIC_DOMAIN)
        params = {
            'name__icontains': keyword,
            'limit': 1,
            'order_by': '-id',
            'format': 'json',
        }
        resp = requests.get(query_url, params=params,
                            headers={'Authorization': 'Token {}'.format(settings.PUB_API_TOKEN)})
        resp.raise_for_status()
        data = resp.json()
        if data['total_count'] > 0:
            obj = data['objects'][0]
            logger.info(obj)
            name = obj.get('name', '')
            # File names look like '<stem>.<YYYYmmdd_HHMMSS>.<ext>…' — the
            # second dot-separated token is the timestamp.
            time_str = re.split(r'\.', name)[1]
            logger.info('last file time: {}'.format(time_str))
            result = datetime.datetime.strptime(time_str, '%Y%m%d_%H%M%S')
            obj['location'] = 'http://{}'.format(settings.PUBLIC_DOMAIN) + obj['link']
            return obj, result
        return None, None

    def incr_backup(self) -> list:
        """
        增量备份 (incremental backup).

        Dumps all non-incremental tables as SQL, then exports each
        incremental model's rows changed since the last full backup.
        :raises Exception: when no full backup exists yet.
        :return: list of upload-result dicts (plus the last full-backup obj).
        """
        logger.info('开始备份数据库')
        db_files = []
        self._ensure_dir()
        incr_dbs_backup, updated_on = self.get_last_file_time(self.project_name + '_incr_dbs')
        if not updated_on:
            logger.error('请先进行完整备份')
            raise Exception('请先进行完整备份')
        logger.info('开始备份: 其他表')
        ignore_tables = [m._meta.db_table for m in self.increase_models]
        db_name = settings.DATABASES['default']['NAME']
        ignore_args = ' '.join(
            '--ignore-table={}.{}'.format(db_name, t) for t in ignore_tables)
        db_files.append(self._dump_and_upload('other_dbs', ignore_args))
        logger.info('完成备份: 其他表')
        db_files.append(incr_dbs_backup)
        for model in self.increase_models:
            backup_file = self.backup_json(model, updated_on)
            if backup_file:
                db_files.append(self.upload(backup_file))
        return db_files

    def backup_all(self) -> list:
        """
        完整备份 (full backup).

        Produces two SQL dumps: everything except the incremental tables,
        and the incremental tables on their own (whose timestamp later acts
        as the incremental cut-off).
        :return: list of upload-result dicts.
        """
        logger.info('开始备份数据库')
        db_files = []
        self._ensure_dir()
        tables = [m._meta.db_table for m in self.increase_models]
        db_name = settings.DATABASES['default']['NAME']
        logger.info('开始备份: 其他表')
        ignore_args = ' '.join(
            '--ignore-table={}.{}'.format(db_name, t) for t in tables)
        db_files.append(self._dump_and_upload('other_dbs', ignore_args))
        logger.info('完成备份: 其他表')
        logger.info('开始备份 需要增量备份的表')
        db_files.append(self._dump_and_upload('incr_dbs', ' '.join(tables)))
        logger.info('完成备份: 增量备份表')
        return db_files
# ===== 备份恢复 (Backup restore utility) =====
import datetime
import json
import logging
import subprocess
import os
import re
import requests
import pytz
from django.conf import settings
logger = logging.getLogger('scripts')
__author__ = 'Jeff JayChen'
class YZRestore(object):
    """Restore a YZBackUp-produced backup.

    Downloads the full SQL dumps and per-model incremental JSON files from
    the public upload API, resets the database, replays the dumps, then
    upserts the incremental rows.
    """

    def __init__(self, increase_models: list, zip_password=None):
        # List of {'model': Model, 'pk': '<field>'} entries for the tables
        # that were backed up incrementally.
        self.increase_models = increase_models
        # Stored as 'P<pwd>' to mirror YZBackUp's zip flag format.
        self.zip_password = 'P{}'.format(zip_password) if zip_password else ''
        self.TEMP_DIR = settings.BACKUP_TMP_PATH
        self.tz = pytz.timezone('UTC') if settings.USE_TZ else pytz.timezone(settings.TIME_ZONE)

    def reset_db(self, db_key):
        """Drop and recreate the database configured at settings.DATABASES[db_key]."""
        import MySQLdb as Database
        db_conf = settings.DATABASES[db_key]
        db_connection = Database.connect(
            user=db_conf['USER'],
            passwd=db_conf['PASSWORD'],
            host=db_conf['HOST'],
            port=db_conf['PORT'] if db_conf['PORT'] else 3306,
        )
        try:
            drop_query = 'DROP DATABASE IF EXISTS `%s`' % db_conf['NAME']
            utf8_support = 'CHARACTER SET utf8'
            create_query = 'CREATE DATABASE `%s` %s' % (db_conf['NAME'], utf8_support)
            logger.info('Executing... "' + drop_query + '"')
            db_connection.query(drop_query)
            logger.info('Executing... "' + create_query + '"')
            db_connection.query(create_query)
        finally:
            db_connection.close()  # was leaked in the original

    def db_exec(self, full_sql_path):
        """Pipe a SQL dump file into the default database via the mysql CLI."""
        db_conf = settings.DATABASES['default']
        cmd = 'mysql -u{user} -p{password} -h{host} -P{port} -D {db} < {full_sql_path}'.format(
            user=db_conf['USER'],
            password=db_conf['PASSWORD'],
            host=db_conf['HOST'],
            port=db_conf['PORT'] if db_conf['PORT'] else 3306,
            db=db_conf['NAME'],
            full_sql_path=full_sql_path,
        )
        # NOTE(review): password on the command line is visible in the
        # process list — consider --defaults-extra-file.
        subprocess.check_output(cmd, shell=True)

    def get_file(self, keyword, _incr_dbs_backup_time=None):
        """Download (and unzip) the newest backup whose name contains ``keyword``.

        :param _incr_dbs_backup_time: when given, skip files older than this
            (they are already covered by the full SQL dump).
        :return: (local_file_path, backup_time), or (None, None).
        """
        query_url = 'http://{}/api/v1/uploads/'.format(settings.PUBLIC_DOMAIN)
        params = {
            'name__icontains': keyword,
            'limit': 1,
            'order_by': '-id',
            'format': 'json',
        }
        resp = requests.get(query_url, params=params,
                            headers={'Authorization': 'Token {}'.format(settings.PUB_API_TOKEN)})
        resp.raise_for_status()
        data = resp.json()
        if data['total_count'] <= 0:
            return None, None
        obj = data['objects'][0]
        logger.info(obj)
        down_url = 'http://{}{}'.format(settings.PUBLIC_DOMAIN, obj['link'])
        name = obj.get('name', '')
        # Names look like '<stem>.<YYYYmmdd_HHMMSS>.<ext>…' — second token
        # is the timestamp.
        time_str = re.split(r'\.', name)[1]
        logger.info('backup file time: {}'.format(time_str))
        backup_time = datetime.datetime.strptime(time_str, '%Y%m%d_%H%M%S')
        if _incr_dbs_backup_time and _incr_dbs_backup_time > backup_time:
            return None, None
        full_zip_path = os.path.join(self.TEMP_DIR, obj['name'])
        full_file_path = re.sub(r'\.zip$', '', full_zip_path)
        if not os.path.isfile(full_file_path):
            if not os.path.isfile(full_zip_path):
                r = requests.get(down_url)
                r.raise_for_status()  # fail fast instead of saving an error page
                with open(full_zip_path, 'wb') as f:
                    f.write(r.content)
            if re.search(r'\.zip', obj['name']):
                # Pass the archive path itself (the original passed the path
                # without '.zip' and relied on unzip's implicit suffix
                # lookup), and supply the password so unzip never prompts.
                pw_flag = ' -{}'.format(self.zip_password) if self.zip_password else ''
                subprocess.check_output(
                    'unzip{pw} -d {dir} {zip_path}'.format(
                        pw=pw_flag, dir=self.TEMP_DIR, zip_path=full_zip_path),
                    shell=True)
                os.remove(full_zip_path)
        return full_file_path, backup_time

    def _aware_timestamps(self, d):
        """Build timezone-aware created_on/updated_on values from a backup row.

        DjangoJSONEncoder wrote ISO strings; re-parse them and attach the
        configured zone.  pytz zones must be attached via localize() — the
        original's datetime.replace(tzinfo=...) picks the zone's LMT offset
        for non-UTC zones (a known pytz pitfall).
        """
        def _aware(value):
            naive = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
            return self.tz.localize(naive)

        return {
            'created_on': _aware(d.get('created_on')),
            'updated_on': _aware(d.get('updated_on')),
        }

    def restore_model(self, model, model_json, pk=None):
        """Upsert rows from an incremental JSON file into ``model``.

        :param pk: name of the primary-key field inside the JSON rows (required).
        :raises Exception: when pk is not provided.
        """
        logger.info('开始恢复增量备份: {}'.format(model._meta.db_table))
        if not pk:
            raise Exception('增量备份请设置主键:pk')
        with open(model_json, 'r') as f:
            data = json.loads(f.read())
        for d in data:
            _pk = d.pop(pk)
            # price_exp is stripped from the bulk update and re-applied with
            # the timestamps — presumably it needs special handling; TODO
            # confirm against the model definition.
            price_exp = d.pop('price_exp', None)
            obj = model.objects.filter(pk=_pk).nocache().first()
            try:
                if obj:
                    model.objects.filter(pk=obj.pk).nocache().update(**d)
                    # Second update restores the original timestamps, which
                    # auto_now fields would otherwise overwrite.
                    model.objects.filter(pk=obj.pk).nocache().update(**self._aware_timestamps(d))
                else:
                    model.objects.create(pk=_pk, **d)
                    update_data = self._aware_timestamps(d)
                    if price_exp:
                        update_data['price_exp'] = price_exp
                    model.objects.filter(pk=_pk).update(**update_data)
            except Exception as e:
                # Best-effort restore: log the failing row and keep going.
                logger.error('{}: {}'.format(model._meta.model_name, str(_pk)))
                logger.error(e)
                continue

    def restore(self):
        """Full restore: reset the DB, load both SQL dumps, then replay the
        per-model incremental JSON files and clean up downloaded files.

        :raises ValueError: when settings.BAK_FILE_KEYWORD is missing.
        """
        logger.info('开始恢复完整备份.')
        if not hasattr(settings, 'BAK_FILE_KEYWORD'):
            raise ValueError('请设置 settings.BAK_FILE_KEYWORD.')
        keyword = settings.BAK_FILE_KEYWORD
        logger.info('清空数据库')
        self.reset_db('default')
        logger.info('清空完毕,开始恢复数据...')
        logger.info('开始恢复:其他表')
        other_tables_sql, _ = self.get_file(keyword + '_other_dbs')
        self.db_exec(other_tables_sql)
        os.remove(other_tables_sql)
        incr_tables_sql, incr_dbs_backup_time = self.get_file(keyword + '_incr_dbs')
        self.db_exec(incr_tables_sql)
        os.remove(incr_tables_sql)
        logger.info('开始恢复增量备份.')
        for m in self.increase_models:
            data_json, _ = self.get_file(
                keyword + '_incr_db_{}.'.format(m['model']._meta.db_table),
                incr_dbs_backup_time)
            if data_json:
                self.restore_model(m['model'], data_json, m['pk'])
                os.remove(data_json)
        logger.info('备份恢复完成')
# ===== 各项目备份脚本 (Per-project backup script) =====
import socket
from django_yz_lib.apps.pub.utils.yz_backup import YZBackUp
from django.apps import apps
from app.models import TempSensorLog, AuthLog, DutyRoster, ModbusLog
from django.utils import timezone
from django_yz_lib.utils.my_admin import MyAdmin
from django_yz_lib.utils.others import mg_send_mail
from django.conf import settings
__author__ = 'JayChen'
def run(*args):
    """Entry point for the backup script (runscript style).

    :param args: args[0] selects the mode — 'backup_all' for a full backup;
        anything else (including no argument at all) runs an incremental
        backup.
    :return: the list of upload results from YZBackUp.
    """
    _type = args[0] if args else ''  # original raised IndexError with no args
    started_at = timezone.localtime(timezone.now()).strftime('%Y-%m-%d %H:%M:%S')
    # Models resolved by name to avoid importing generated/historical classes
    # directly.
    str_models = [
        {'app_label': 'app', 'model_name': 'historicalmyuser'}
    ]
    increase_models = [apps.get_model(m['app_label'], m['model_name']) for m in str_models]
    increase_models = increase_models + [TempSensorLog, AuthLog, DutyRoster, ModbusLog]
    if _type == 'backup_all':
        db_files = YZBackUp(increase_models=increase_models).backup_all()
    else:
        db_files = YZBackUp(increase_models=increase_models).incr_backup()
    ended_at = timezone.localtime(timezone.now()).strftime('%Y-%m-%d %H:%M:%S')
    # NOTE(review): started_at/ended_at and the mg_send_mail/MyAdmin imports
    # above look intended for a completion-report email — confirm whether
    # that step was dropped from this snippet.
    return db_files
# ===== 各项目恢复备份脚本 (Per-project restore script) =====
"""
恢复备份脚本
"""
import logging
from django.apps import apps
from django.conf import settings
from django.core.management import call_command
from django_yz_lib.apps.pub.utils.yz_restore import YZRestore
from app.models import MyUser
from app.models import TempSensorLog, AuthLog, DutyRoster, ModbusLog
logger = logging.getLogger('scripts')
__author__ = 'JayChen'
def run(*args):
    """Entry point for the restore script: restore the latest full backup,
    replay the incremental files, then run Django migrations.

    :raises ValueError: when settings.BAK_FILE_KEYWORD is not configured.
    """
    if not hasattr(settings, 'BAK_FILE_KEYWORD'):
        raise ValueError('请设置 settings.BAK_FILE_KEYWORD.')
    # The original left 'xxx' placeholders here (NameError at runtime).
    # Filled with the models this script imports, mirroring the backup
    # script's increase_models — the two lists must stay in sync.
    # TODO(review): confirm each model's primary-key field name is 'id'.
    config = [
        {'model': TempSensorLog, 'pk': 'id'},
        {'model': AuthLog, 'pk': 'id'},
        {'model': DutyRoster, 'pk': 'id'},
        {'model': ModbusLog, 'pk': 'id'},
    ]
    logger.info('开始恢复备份.')
    # Historical model resolved by name, as in the backup script.
    str_models = [{'app_label': 'app', 'model_name': 'historicalmyuser', 'pk': 'id'}, ]
    _m = [{'model': apps.get_model(m['app_label'], m['model_name']), 'pk': m['pk']} for m in str_models]
    increase_models = config + _m
    YZRestore(increase_models=increase_models).restore()
    logger.info('数据库恢复完成,开始执行 migrate...')
    call_command("migrate", interactive=False)
    logger.info('备份恢复完成')