脚本比较简单,不做过多介绍,唯一需要注意的是考虑到存储账号下文件数量过多的情况(例如十万或者百万级的文件数),为了避免单次Query过多文件数导致卡死或者超时,因此脚本中使用了一个$tokenStep变量限制每次Query Blob的上限(脚本中定义的是5000一次,如果觉得有需要可以改成10000,不要太多)。
脚本如下:
<#
.SYNOPSIS
Summarizes blob count and total size per container for an Azure storage account.
.PARAMETER SubscriptionName
Name of the Azure subscription that owns the storage account.
.PARAMETER StorageAccountName
Name of the (classic) storage account whose containers will be summarized.
#>
param(
[Parameter(Mandatory = $true)]
[string]$SubscriptionName,
[Parameter(Mandatory = $true)]
[string]$StorageAccountName
)
# Enumerates every blob in $containerName (paged via continuation tokens) and
# returns a PSCustomObject with ContainerName, BlobCount and a human-readable
# TotalSize string.
Function SummarizeContainer($context, $containerName)
{
Write-Host ("Calculating container {0}" -f $containerName) -ForegroundColor Yellow;
$totalCount = 0;
$totalSize = 0;
$token = $null;
# Page size per service call; keeps each query bounded so very large
# containers (100k+ blobs) do not hang or time out. Raise with care.
$tokenStep = 5000;
do
{
# Wrap in @(): when exactly one blob is returned, PowerShell unwraps the
# result to a scalar and $blobs.Length would be the BLOB'S BYTE SIZE, not
# the item count — a single zero-byte blob would wrongly terminate the loop.
$blobs = @(Get-AzureStorageBlob -Container $containerName -Context $context -ContinuationToken $token -MaxCount $tokenStep);
if($blobs.Count -le 0)
{
break;
}
foreach($blob in $blobs)
{
$totalSize += $blob.Length;
}
$totalCount += $blobs.Count;
Write-Host ("{0} blobs calculated" -f $totalCount) -ForegroundColor Cyan;
# The last blob of the page carries the token for the next page; it is
# $null once the final page has been read.
$token = $blobs[$blobs.Count - 1].ContinuationToken;
}
while($null -ne $token)
# Format the byte total in the largest unit it reaches (>= so that exactly
# 1GB/1MB/1KB shows in the larger unit instead of e.g. "1,024.00MB").
$totalSizeStr = "";
if (($totalSize / 1GB) -ge 1)
{
$totalSizeStr = "{0:N}GB" -f ($totalSize / 1GB);
}
elseif (($totalSize / 1MB) -ge 1)
{
$totalSizeStr = "{0:N}MB" -f ($totalSize / 1MB);
}
elseif (($totalSize / 1KB) -ge 1)
{
$totalSizeStr = "{0:N}KB" -f ($totalSize / 1KB);
}
else
{
$totalSizeStr = "{0:N}Byte" -f $totalSize;
}
return [PSCustomObject]@{ContainerName = $containerName
BlobCount = $totalCount
TotalSize = $totalSizeStr};
}
# Bind the target subscription and resolve the storage account's context.
[void](Select-AzureSubscription -SubscriptionName $SubscriptionName);
$storage = Get-AzureStorageAccount -StorageAccountName $StorageAccountName;
# Fail loudly instead of passing a $null context to every later cmdlet.
if ($null -eq $storage)
{
Write-Error ("Storage account '{0}' was not found in subscription '{1}'." -f $StorageAccountName, $SubscriptionName);
exit 1;
}
$context = $storage.Context;
Write-Host "Collecting containers" -ForegroundColor Yellow;
$containers = Get-AzureStorageContainer -Context $context;
# Capture the loop output directly: '$arr += ...' re-copies the array on
# every iteration (accidentally O(n^2) over the container count).
$containerSummary = foreach ($container in $containers)
{
SummarizeContainer $context $container.Name;
}
$containerSummary | Select-Object ContainerName, BlobCount, TotalSize;
测试结果: