设置 next.config.js：生产环境将 assetPrefix 设置为 S3 资源链接
// next.config.js — in production, serve the build's static assets from S3
// (keyed by package version) instead of the app server; in dev, serve locally.
const isDev = process.env.NODE_ENV !== 'production';
const version = require('./package.json').version;
// Path-style S3 URL: https://s3.<region>.amazonaws.com/<bucket>/<version>
// NOTE(review): the original omitted the `s3.` host component (an invalid
// endpoint) and read AWS_REGION, which upload.js below never uses — switched
// to AWS_S3_REGION / AWS_S3_BUCKET_NAME so both snippets share one .env.
assetPrefix: isDev ? '' : `https://s3.${process.env.AWS_S3_REGION}.amazonaws.com/${process.env.AWS_S3_BUCKET_NAME}/${version}`
upload.js
require('dotenv').config();
const fs = require('fs');
const readDir = require('recursive-readdir');
const path = require('path');
const AWS = require('aws-sdk');
const mime = require('mime-types');
const version = require('./package.json').version;
// Configure the shared aws-sdk client from environment variables loaded
// by dotenv above; maxRetries smooths over transient network failures.
const awsConfig = {
  region: process.env.AWS_S3_REGION,
  accessKeyId: process.env.AWS_S3_ACCESS_KEY,
  secretAccessKey: process.env.AWS_S3_SECRET_KEY,
  maxRetries: 3
};
AWS.config.update(awsConfig);
// Promise wrapper around the callback-based `recursive-readdir` API.
// Resolves with a flat list of file paths under `dir`, skipping any
// entries matched by the optional `ignores` patterns.
const getDirectoryFilesRecursive = (dir, ignores = []) =>
  new Promise((resolve, reject) => {
    readDir(dir, ignores, (err, files) => {
      if (err) {
        reject(err);
      } else {
        resolve(files);
      }
    });
  });
// Build the S3 object key for a local file path:
//   <version><replaced><path after the first occurrence of `toReplace`>
// e.g. ('/app/.next/static/x.js', '.next/', '/_next/') with version 1.0.0
//   -> '1.0.0/_next/static/x.js'
// Uses indexOf/slice rather than split(toReplace)[1], which silently
// truncated the key when the path contained `toReplace` more than once.
const generateFileKey = (fileName, toReplace, replaced) => {
  const S3objectPath = fileName.slice(fileName.indexOf(toReplace) + toReplace.length);
  return version + replaced + S3objectPath;
};
const s3 = new AWS.S3();

/**
 * Upload every file in `fileArray` to the configured S3 bucket.
 *
 * @param {string[]} fileArray - absolute paths of files to upload
 * @param {string} toReplace - local path prefix to strip (e.g. '.next/')
 * @param {string} replaced - key prefix to substitute (e.g. '/_next/')
 * @returns {Promise<void>} resolves when all uploads have settled
 *
 * The original version was `async` with no `await`: it fired callback-style
 * uploads from `.map`, so its try/catch could never observe an upload error
 * and callers could not wait for completion. Using `.promise()` plus
 * `Promise.all` makes failures reach the catch block, which records a
 * non-zero exit code.
 */
const uploadToS3 = async (fileArray, toReplace, replaced) => {
  try {
    await Promise.all(
      fileArray.map(file => {
        const S3params = {
          Bucket: process.env.AWS_S3_BUCKET_NAME,
          Body: fs.createReadStream(file),
          Key: generateFileKey(file, toReplace, replaced),
          ACL: 'public-read',
          ContentType: String(mime.lookup(file)),
          ContentEncoding: 'utf-8',
          // Safe because every key is namespaced by the package version.
          CacheControl: 'immutable,max-age=31536000,public'
        };
        return s3
          .upload(S3params)
          .promise()
          .then(data => {
            console.log(`Assets uploaded to S3:`, data.key);
          });
      })
    );
  } catch (error) {
    console.error(error);
    process.exitCode = 1;
  }
};
/**
 * Walk each configured directory and upload its contents to S3.
 *
 * @param {{filePath: string, toReplace: string, replaced: string}[]} dict
 * @returns {Promise<void>}
 *
 * Awaits each upload batch (the original fired uploadToS3 without awaiting,
 * leaving floating promises racing process shutdown).
 */
const start = async function(dict) {
  for (const entry of dict) {
    const files = await getDirectoryFilesRecursive(
      path.resolve(__dirname, entry.filePath),
      ['.DS_Store', 'BUILD_ID']
    );
    await uploadToS3(files, entry.toReplace, entry.replaced);
  }
};
// Kick off uploading the Next.js build output. The trailing .catch ensures
// an unexpected rejection (e.g. directory read failure) is reported and the
// process exits non-zero instead of dying with an unhandled rejection.
start([
  {
    filePath: '.next',
    toReplace: '.next/',
    replaced: '/_next/'
  }
]).catch(err => {
  console.error(err);
  process.exitCode = 1;
});