Large File Chunked Upload

The code below slices a large file into 5MB chunks and computes an MD5 hash for each chunk in parallel across Web Workers. Because the Worker constructor takes a script URL rather than a function, the code is organized as two modules: worker.js (slicing and hashing, shown first) and cutFile.js (distributing the work on the main thread).

// worker.js — runs inside a Web Worker
import SparkMD5 from 'spark-md5'; // incremental MD5, used to hash each chunk
function createChunk(file, chunkIndex, chunkSize) {
  return new Promise((resolve) => {
    const start = chunkIndex * chunkSize;
    const end = Math.min(file.size, start + chunkSize);
    const spark = new SparkMD5.ArrayBuffer();
    const fileReader = new FileReader();
    const blob = file.slice(start, end);
    fileReader.onload = function (e) {
      spark.append(e.target.result);
      resolve({
        start,
        end,
        chunkIndex,
        hash: spark.end(),
        blob
      });
    };
    // must be called inside the executor, before the promise can settle
    fileReader.readAsArrayBuffer(blob);
  });
}
// Each worker hashes its assigned contiguous range of chunk indices.
onmessage = async (e) => {
  const {
    file,
    CHUNK_SIZE,
    startChunkIndex: start,
    endChunkIndex: end
  } = e.data;
  const proms = [];
  for (let i = start; i < end; i++) {
    proms.push(createChunk(file, i, CHUNK_SIZE));
  }
  const chunks = await Promise.all(proms);
  postMessage(chunks);
};

// cutFile.js — main thread: split the chunk list evenly across workers
const CHUNK_SIZE = 1024 * 1024 * 5; // 5MB
const THREAD_COUNT = navigator.hardwareConcurrency || 4; // CPU core count

export function cutFile(file) {
  return new Promise((resolve) => {
    const chunkCount = Math.ceil(file.size / CHUNK_SIZE);
    const threadChunkCount = Math.ceil(chunkCount / THREAD_COUNT);
    const result = [];
    let finishCount = 0;
    for (let i = 0; i < THREAD_COUNT; i++) {
      // spawn a worker and assign it a contiguous range of chunk indices
      const start = i * threadChunkCount;
      const end = Math.min((i + 1) * threadChunkCount, chunkCount);
      const worker = new Worker('./worker.js', { type: 'module' });
      worker.postMessage({
        file,
        CHUNK_SIZE,
        startChunkIndex: start,
        endChunkIndex: end
      });
      worker.onmessage = (e) => {
        // place this worker's chunks at their global positions in the result
        for (let j = start; j < end; j++) {
          result[j] = e.data[j - start];
        }
        worker.terminate(); // this worker's range is done
        finishCount++;
        if (finishCount === THREAD_COUNT) {
          resolve(result);
        }
      };
    }
  });
}
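For completeness, a minimal usage sketch: calling cutFile from a file input's change handler. The #file-input element and the logging are illustrative assumptions, not part of the original post.

import { cutFile } from './cutFile.js';

document.querySelector('#file-input').addEventListener('change', async (e) => {
  const file = e.target.files[0];
  if (!file) return;
  const chunks = await cutFile(file);
  // each entry: { start, end, chunkIndex, hash, blob }
  console.log(`split into ${chunks.length} chunks`, chunks);
});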
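Since the title promises uploading, here is one way the resulting chunks might then be sent to a server. The /upload endpoint and form field names are assumptions for illustration; a real implementation would also limit upload concurrency rather than firing all requests at once.

// Hypothetical upload step: endpoint and field names are assumed, not from the post.
async function uploadChunks(file, chunks) {
  await Promise.all(chunks.map((chunk) => {
    const form = new FormData();
    form.append('filename', file.name);
    form.append('chunkIndex', String(chunk.chunkIndex));
    form.append('hash', chunk.hash);
    form.append('blob', chunk.blob);
    return fetch('/upload', { method: 'POST', body: form });
  }));
}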