"""使用需要自己修改:
- 用户名、密码、服务器地址、指定的 bucket
- 本地上传哪个文件夹
- 要上传到远端的路径
"""
import os
from tqdm import tqdm
import botocore
import boto3
from boto3.session import Session
from loguru import logger
# Connection settings: fill in your own access key and secret.
aws_access_key_id = '你的name'
aws_secret_access_key = '你的secret'

session = Session(
    aws_access_key_id=aws_access_key_id,
    aws_secret_access_key=aws_secret_access_key,
)

# Low-level S3 client bound to your server's endpoint (port 9000,
# e.g. a MinIO deployment). NOTE(review): endpoint_url usually needs a
# scheme such as http:// or https:// — confirm for your server.
s3 = session.client('s3', endpoint_url='你服务器的地址:9000')
# 下载一个文件夹,并且保存文件夹内的目录结果的函数
def upload_folder_to_s3(s3_client, s3_bucket, local_dir, s3_path):
    """Recursively upload every file under *local_dir* to an S3 bucket.

    The directory layout below *local_dir* is mirrored under the key
    prefix *s3_path* in *s3_bucket*; S3 creates missing "folders"
    implicitly on upload.

    Args:
        s3_client: boto3 S3 client (e.g. from ``session.client('s3', ...)``).
        s3_bucket: name of the target bucket.
        local_dir: local directory whose files are uploaded.
        s3_path: remote key prefix the files are stored under.
    """
    # BUGFIX: the original body referred to ``input_dir``, which is not a
    # parameter of this function (the parameter is ``local_dir``) and would
    # raise NameError on every call.
    stream = tqdm(os.walk(local_dir))
    for path, subdirs, files in stream:
        for file in files:
            # Key suffix = path relative to local_dir, with OS separators
            # normalised to '/' and duplicate slashes collapsed.
            dest_path = path.replace(local_dir, "").replace(os.sep, '/')
            s3_file = f'{s3_path}/{dest_path}/{file}'.replace('//', '/')
            local_file = os.path.join(path, file)
            s3_client.upload_file(local_file, s3_bucket, s3_file)
            stream.set_description(f'Uploaded {local_file} to {s3_file}')
    print(f"Successfully uploaded {local_dir} to S3 {s3_path}")
if __name__ == "__main__":
    # 'test'            -> target bucket name
    # /local/testdata/  -> local folder to upload
    # /remote/testdata/ -> remote key prefix on S3; the remote/testdata
    #                      "folders" are created automatically on upload.
    upload_folder_to_s3(s3, 'test', '/local/testdata/', '/remote/testdata/')