先贴代码
// el-upload `on-change` handler.
// Classifies the file by its extension, then drives the upload state machine for
// the matching entry in `uploadFiles`: MD5-check -> CreateUpload -> (quick-upload
// or chunked resume upload) -> success/fail. Also builds a shortened display name.
const change = (uploadFile: UploadFile, uploadFiles: UploadFiles) => {
  const findFile = uploadFiles.find((f) => f.uid === uploadFile.uid) as any;
  const FileTypeArr = uploadFile.name.split(".");
  // BUG FIX: the original used `case "jpg" || "png" || "pdf":`, which evaluates
  // the `||` expression to just "jpg" — "png", "pdf", "pptx", "docx", "xlsx"
  // and "avi" were never matched. Use fall-through case labels instead.
  // `.toLowerCase()` also makes "JPG" etc. match (backward-compatible widening).
  switch (FileTypeArr[FileTypeArr.length - 1]?.toLowerCase()) {
    case "jpg":
    case "png":
    case "pdf":
      FileTypeName = "图片";
      break;
    case "ppt":
    case "pptx":
      FileTypeName = "ppt";
      break;
    case "doc":
    case "docx":
      FileTypeName = "文档";
      break;
    case "xls":
    case "xlsx":
      FileTypeName = "表格";
      break;
    case "mp4":
    case "avi":
      FileTypeName = "视频";
      break;
  }
  switch (findFile.status) {
    case "ready": {
      // About to upload: first compute the file's MD5 so the server can
      // decide between a quick upload (file already known) and a real upload.
      findFile.State = i18n.tc(`Bus.File.Checking`);
      findFile.percentage = 0;
      fileMd5(findFile.raw, true, 0, async (percentage: number, e: any, md5: any) => {
        if (percentage >= 0) {
          findFile.percentage = percentage;
          if (percentage >= 100) {
            // MD5 done — switch the row into "uploading" state.
            findFile.status = "uploading";
            findFile.State = i18n.tc(`Bus.File.Uploading`);
            findFile.percentage = 0;
            // Ask the server to create an upload session for this file.
            const res = await api.Sys.CreateUpload(
              props.Service,
              props.Module,
              uploadFile.raw?.name,
              uploadFile.raw?.size,
              md5,
              props.Bus
            );
            if (res && res.Data) {
              findFile.FileID = res.Data.FileID;
              if (res.Data.QuickUpload) {
                // Server already has this content — no bytes to send.
                findFile.status = "success";
                emitFileIDs();
              } else {
                // Upload the file in chunks, resuming from the server-reported
                // breakpoint offset.
                fileMd5(
                  findFile.raw,
                  false,
                  res.Data.Breakpoint,
                  (percentage: number, e: any, md5: any) => {
                    if (percentage >= 0) {
                      findFile.percentage = percentage;
                      if (percentage >= 100) {
                        findFile.status = "success";
                        emitFileIDs();
                      }
                    }
                  },
                  res.Data.FileID
                );
              }
            } else {
              findFile.status = "fail";
            }
          }
        }
      });
      break;
    }
  }
  // Build a display name truncated to ~24 chars while keeping the extension.
  // NOTE(review): if the extension itself is longer than ~23 chars the
  // substring length goes negative — presumably never happens in practice.
  if (uploadFile.name && uploadFile.name.length > 24) {
    const filtSuffix = uploadFile.name.split(".").pop();
    const newName = uploadFile.name.substring(0, 24 - filtSuffix?.length - 1) + "...." + filtSuffix;
    uploadFile.newName = newName;
  }
};
/**
 * Reads `file` from byte offset `position` onward in fixed-size chunks,
 * reporting progress through `callback(percentage, error, md5)`.
 *
 * - onlyCheck === true: only computes the MD5 of the bytes read (no upload).
 * - onlyCheck === false: additionally sends each chunk to the server via
 *   `uploadChunk` — this is the resume-upload path, `position` being the
 *   server-reported breakpoint and `fileID` the server's file handle.
 *
 * The final invocation is callback(100, null, <MD5 of the bytes read>);
 * on a read error it is callback(-1, <message>, null).
 */
const fileMd5 = (file: any, onlyCheck: boolean, position: number, callback: any, fileID?: string) => {
  // mozSlice / webkitSlice keep old Firefox / Chrome versions working.
  const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
    chunkSize = 4096 * 1024, // read in chunks of 4 MB (the old comment said 5MB — wrong)
    // FIX: count only the bytes remaining after `position`. The original used
    // the full file size, so a resumed upload (position > 0) over-counted
    // chunks, read empty slices past EOF, and mis-reported the percentage.
    chunks = Math.ceil((file.size - position) / chunkSize),
    spark = new SparkMD5.ArrayBuffer(), // rolling MD5 over everything read
    fileReader = new FileReader();
  let currentChunk = 0;
  // FIX: the original wrote `function async(e)`, which merely NAMES the
  // function "async" — it was never an async function. Use a plain function.
  fileReader.onload = function (e: any) {
    spark.append(e.target.result); // fold this chunk into the whole-file MD5
    // Independent per-chunk MD5 so the server can verify each block.
    const chunkSpark = new SparkMD5.ArrayBuffer();
    chunkSpark.append(e.target.result);
    const chunkMd5 = chunkSpark.end();
    currentChunk++;
    if (!onlyCheck) {
      // Push this chunk to the server; `last` flags the final chunk.
      uploadChunk(
        fileID,
        position + (currentChunk - 1) * chunkSize,
        e.target.result.byteLength,
        chunkMd5,
        new Uint8Array(e.target.result),
        currentChunk >= chunks
      );
    }
    if (currentChunk < chunks) {
      callback((currentChunk / chunks) * 100, null, null);
      loadNext();
    } else {
      // Done: report 100% together with the MD5 of all bytes read.
      callback(100, null, spark.end());
    }
  };
  fileReader.onerror = function () {
    callback(-1, "oops, something went wrong.", null);
  };
  // Kick off the read of the next chunk (offsets are relative to `position`).
  function loadNext() {
    const start = position + currentChunk * chunkSize,
      end = start + chunkSize >= file.size ? file.size : start + chunkSize;
    fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
  }
  loadNext();
};
/**
 * Uploads a single file chunk to the server.
 *
 * @param fileID  server-side file handle returned by CreateUpload
 * @param start   byte offset of this chunk within the file
 * @param size    chunk length in bytes
 * @param md5     MD5 of this chunk, for server-side verification
 * @param content chunk payload
 * @param last    true when this is the final chunk of the file
 */
const uploadChunk = async (fileID: string, start: number, size: number, md5: string, content: any, last: boolean) => {
  // FIX: removed leftover debug console.log statements; the response was only
  // ever inspected to log it, so we just await the call.
  await api.Sys.UploadChunk(props.Service, props.Module, fileID, start, size, md5, content, last);
};
这个是用的el-upload组件的上传,change事件内的uploadFile: UploadFile, uploadFiles: UploadFiles是上传文件的信息。switch case语句是判断文件的具体类型,主要是想记录一下里面的fileMd5函数,这个函数内的
const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
chunkSize = 4096 * 1024, // Read in chunks of 4MB
chunks = Math.ceil(file.size / chunkSize),
spark = new SparkMD5.ArrayBuffer(),
fileReader = new FileReader();这个blobSlice是获取文件切块的方法,mozSlice与webkitSlice是为了兼容低版本的Firefox 和 Chrome浏览器。chunkSize是切块的大小,chunks是要被切成多少块,spark
: 使用 SparkMD5.ArrayBuffer
创建了一个新的实例,用于计算整个文件的MD5哈希值。SparkMD5
是一个快速生成MD5哈希值的JavaScript库,能够处理大文件和二进制数据。fileReader
: 创建了一个 FileReader
实例,用于异步读取文件的内容
uploadChunk函数则是将分好的文件块上传至服务器。
暂时就这么多,如果之后有新的认识之后再进行补充。