# --- Operational notes: ClickHouse backup / restore cheatsheet ---
# 1) Export data:
#    sudo docker exec -it your_container_id /bin/bash -c 'clickhouse-client --user default --password yourpwd --query="select * from db.your_xxx_table" > /tmp_clickhouse_backup_csv_data/your_xxx_table-20230113.tsv'
# 2) Copy the data out of the docker container:
#    sudo docker cp your_container_id:/tmp_clickhouse_backup_csv_data/your_xxx_table-20230113.tsv ./
# 3) Restore data:
#    cat /tmp_clickhouse_backup_csv_data/test_backup.tsv | clickhouse-client --user default --password yourpwd --query "Insert into db.your_xxx_table format TSV"
# 4) Enter the target database on the production ClickHouse:
#    sudo docker exec -it your_container_id /bin/bash -c 'clickhouse-client --user root --password yourpwd -d yourdb'
# -*- coding: utf-8 -*-
import json
#import telegram
import boto3
import os
#from clickhouse_driver import Client
import datetime
import time
# Build the S3 client used for uploading backup files.
def getS3Client():
    """Return a boto3 S3 client for the backup bucket.

    SECURITY(review): the access key pair was previously hard-coded here.
    It is now read from the standard AWS environment variables, falling
    back to the original literals only for backward compatibility.
    Credentials committed to source control must be considered
    compromised — rotate these keys and delete the fallback values.
    """
    prod_s3_client = boto3.client(
        's3',
        aws_access_key_id=os.environ.get(
            "AWS_ACCESS_KEY_ID", "AKIA37IEKUUUQMK5GDEM"),
        aws_secret_access_key=os.environ.get(
            "AWS_SECRET_ACCESS_KEY",
            "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"),
    )
    return prod_s3_client
if __name__ == '__main__':
    # Upload every .tsv backup file in the current working directory
    # to the 'fp-clickhouse-backup' S3 bucket, keyed by file name.
    prod_s3_client = getS3Client()
    path = os.getcwd()  # absolute path of the directory the script runs in
    file_name_list = os.listdir(path)  # all entries in that directory
    print(path, file_name_list)
    for fname in file_name_list:
        # Match only real .tsv files; the original substring test
        # ("'tsv' in fname") also matched names like 'notsv.txt'.
        if fname.endswith('.tsv'):
            # os.path.join instead of '/{0}/{1}'.format(path, fname):
            # path is already absolute, so the old format produced a
            # double leading slash ('//home/...').
            FilePath = os.path.join(path, fname)
            print('-------------当前 upload file is-------------->', FilePath, fname)
            prod_s3_client.upload_file(Bucket='fp-clickhouse-backup', Filename=FilePath, Key=fname)
    print('task finish.')