在 AWS 控制台上,需要点开每个存储桶的指标页才能查看该桶的大小;桶一多,一个一个点开实在太麻烦了。S3 对比 OSS 还是有不太符合国人使用习惯的地方。这里分享一个批量查看的脚本,
填入 access_key 和区域就可以用了。
#!/usr/bin/env python3
# coding=utf-8
"""Print the StandardStorage size (in GB) of every S3 bucket in one region.

S3 has no direct "bucket size" API; CloudWatch publishes a daily
``BucketSizeBytes`` metric per bucket (it typically lags ~2 days behind),
so we query a multi-day window and report the newest datapoint.
Fill in the access key pair and the region below, then run the script.
"""
import boto3
import datetime

# Access key pair — generate one for your account in the IAM console.
aws_access_key_id = ''
aws_secret_access_key = ''
# Region the buckets live in.
region_name = ''

# S3 client: only used to enumerate the bucket names in this region.
s3 = boto3.client(
    's3',
    aws_access_key_id=aws_access_key_id,
    aws_secret_access_key=aws_secret_access_key,
    region_name=region_name,
)

# CloudWatch client: bucket size is a CloudWatch metric, not an S3 API.
# Created once here instead of once per bucket inside the loop (the
# original also shadowed the S3 client variable by reusing its name).
cloudwatch = boto3.client(
    'cloudwatch',
    aws_access_key_id=aws_access_key_id,
    aws_secret_access_key=aws_secret_access_key,
    region_name=region_name,
)

# BucketSizeBytes datapoints lag roughly 2 days behind, so query a 4-day
# window to be safe.  Use a timezone-aware "now" (utcnow() is deprecated).
window = datetime.timedelta(days=4)
now = datetime.datetime.now(datetime.timezone.utc)

for bucket in s3.list_buckets()['Buckets']:
    bucketname = bucket['Name']
    # Metric docs: https://docs.aws.amazon.com/zh_cn/AmazonS3/latest/userguide/cloudwatch-monitoring-accessing.html
    # API docs:    https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudwatch.html#CloudWatch.Client.get_metric_statistics
    response = cloudwatch.get_metric_statistics(
        Namespace='AWS/S3',
        MetricName='BucketSizeBytes',
        Dimensions=[
            {'Name': 'BucketName', 'Value': bucketname},
            {'Name': 'StorageType', 'Value': 'StandardStorage'},
        ],
        StartTime=(now - window).isoformat(),
        EndTime=now.isoformat(),
        Period=86400,  # seconds; 86400 = one datapoint per day
        Statistics=['Average'],
        Unit='Bytes',
    )
    datapoints = response['Datapoints']
    if not datapoints:
        # Very small / empty buckets may have no metric at all — skip them
        # (explicit check instead of the original bare except, which would
        # also have hidden genuine errors such as KeyErrors).
        continue
    # get_metric_statistics returns datapoints in no guaranteed order, so
    # pick the newest one rather than blindly indexing [0].
    latest = max(datapoints, key=lambda d: d['Timestamp'])
    size_bytes = latest['Average']
    # print(bucketname, '%.2fTB' % (size_bytes / 1024 / 1024 / 1024 / 1024))
    print(bucketname, '%.2fGB' % (size_bytes / 1024 / 1024 / 1024))
看下效果:一次就取出了这个区域内所有存储桶的大小。