- 文件上传:大于 20M 时进行分块上传,否则单次上传
- 创建一个FileHandler类 来进行文件的一些判断处理
import { getSTS } from "@/utils/api/global";
import MD5 from "spark-md5";
import {uploadChunks, uploadSFile} from '@/utils/api/upload.js'
import Chunk from "@/utils/share/chunk";
/**
 * FileHandler — validates files by magic number and uploads them,
 * switching to chunked upload for files larger than 20MB.
 */
const FileHandler = class FileHandler {
  constructor(options) {
    this.options = options;
  }

  /**
   * Upload a small file in a single request.
   * @param {string} filePath unused here; kept for interface compatibility
   * @param {File} file
   * @returns {Promise} the request promise (resolves with the server response)
   */
  uploadSingleFile(filePath, file) {
    const formData = new FormData();
    formData.append('file', file);
    // Return the request promise directly. The old wrapper only ever resolved,
    // so single-file upload failures were silently swallowed and the caller's
    // .catch() never fired.
    return uploadSFile(formData);
  }

  /**
   * Upload entry point: chunked upload for files > 20MB, single request otherwise.
   * @param {string} path remote base path
   * @param {File} file
   * @param {Function} progress progress callback forwarded to the chunk uploader
   * @param {*} tempCheckpoint unused; kept for interface compatibility
   * @returns {Promise}
   */
  uploadFile = (path, file, progress, tempCheckpoint) => {
    const filePath = path + "/" + hashFilePath(file) + "/" + file.name;
    if (file.size > 1024 * 1024 * 20) {
      try {
        // This try/catch only covers synchronous throws; async failures reach
        // the caller as a rejected promise from uploadFileChunk.
        return Chunk.uploadFileChunk(file, progress);
      } catch (e) {
        return Promise.reject("文件上传失败,请重试!");
      }
    }
    return this.uploadSingleFile(filePath, file);
  };

  // True when the file's magic number identifies PNG or JPEG.
  isImage = async function(file) {
    return (await isPNG(file)) || (await isJPG(file));
  };

  // True when the file's magic number identifies MP4 or WebM.
  isVideo = async function(file) {
    return (await isMp4(file)) || (await isWebm(file));
  };

  /**
   * Check a file against a whitelist of kinds using magic numbers.
   * NOTE: .docx/.xlsx/.pptx are all ZIP containers, so they share the
   * "504B0304" signature checked by isDocx.
   * @param {File} file
   * @param {string[]} exts allowed kinds, e.g. ["word", "pdf", "txt"]
   * @returns {Promise<boolean>}
   */
  isAttachment = async function(file, exts) {
    if (exts.includes("word") && (await isDoc(file))) return true;
    // legacy .xls (OLE2) or .xlsx (ZIP container)
    if (exts.includes("excel") && ((await isXls(file)) || (await isDocx(file)))) return true;
    // .pptx is a ZIP container; legacy OLE2 .ppt is not matched by this branch.
    if (exts.includes("ppt") && (await isDocx(file))) return true;
    if (exts.includes("pdf") && (await isPDF(file))) return true;
    if (exts.includes("jpg") && (await isJPG(file))) return true;
    if (exts.includes("png") && (await isPNG(file))) return true;
    if (exts.includes("txt")) {
      // Plain text has no magic number, so fall back to the file extension.
      const ext = file.name.slice(file.name.lastIndexOf(".") + 1);
      if (ext === "txt") return true;
    }
    return false;
  };
};
/**
 * Read the first `length` bytes of a file/blob and return them as an
 * uppercase hex string, for magic-number file-type detection.
 * @param {Blob} file
 * @param {number} length number of leading bytes to read
 * @returns {Promise<string>} e.g. "89504E47" for a PNG; "" for an empty blob
 */
function getFileType(file, length) {
  // Blob#arrayBuffer() replaces the deprecated FileReader.readAsBinaryString;
  // reading bytes through a Uint8Array produces the same per-byte hex values
  // without the binary-string round trip.
  return file
    .slice(0, length)
    .arrayBuffer()
    .then((buf) =>
      Array.from(new Uint8Array(buf), (b) =>
        b.toString(16).toUpperCase().padStart(2, "0")
      ).join("")
    );
}
// Build a storage path segment: md5(current timestamp) + "_" + original file name.
function hashFilePath(file) {
  const stamp = Date.now().toString();
  return MD5.hash(stamp) + "_" + file.name;
}
// PNG magic number: 0x89 followed by ASCII "PNG".
async function isPNG(file) {
  return (await getFileType(file, 4)) === "89504E47";
}
// JPEG files begin with the marker bytes FF D8 FF.
async function isJPG(file) {
  return (await getFileType(file, 3)) === "FFD8FF";
}
// PDF header: ASCII "%PDF-1." in the first 7 bytes.
async function isPDF(file) {
  return (await getFileType(file, 7)) === "255044462D312E";
}
// ZIP local-file header "PK\x03\x04" — matches .docx/.xlsx/.pptx containers.
async function isDocx(file) {
  return (await getFileType(file, 4)) === "504B0304";
}
// OLE2 compound-document header — legacy Office formats (.xls/.doc/.ppt).
async function isXls(file) {
  return (await getFileType(file, 4)) === "D0CF11E0";
}
// NOTE(review): "0D444F43" is "\rDOC", an uncommon signature; classic .doc
// files are OLE2 ("D0CF11E0", already matched by isXls) — confirm this value
// is intentional for the documents this project accepts.
async function isDoc(file) {
  return (await getFileType(file, 4)) === "0D444F43";
}
// MP4: a 4-byte box size (0x14/0x18/0x1C/0x20) followed by ASCII "ftyp".
async function isMp4(file) {
  const sig = await getFileType(file, 8);
  const knownHeaders = [
    "0000001466747970",
    "0000001866747970",
    "0000001C66747970",
    "0000002066747970",
  ];
  return knownHeaders.includes(sig);
}
// EBML header 1A 45 DF A3 — WebM/Matroska container.
async function isWebm(file) {
  return (await getFileType(file, 4)) === "1A45DFA3";
}
/**
 * Detect an MP3 carrying an ID3v2 tag: the first 3 bytes are ASCII "ID3"
 * (0x49 0x44 0x33 → hex "494433"). The previous constant "494443" spelled
 * "IDC" and never matched real files.
 * NOTE(review): MP3s without an ID3 tag (raw frame sync 0xFFEx/0xFFFx) are
 * still not detected — confirm whether callers need that case.
 * @param {File} file
 * @returns {Promise<boolean>}
 */
export async function isMp3(file) {
  const res = await getFileType(file, 3);
  return res === "494433";
}
// Singleton export. Constructed with no arguments, so `this.options` is undefined.
export default new FileHandler();
- 创建一个块处理文件 Chunk.js
import sparkMD5 from 'spark-md5'
import axios from 'axios'
import pathname from '@/utils/api/pathname'
import { uploadChunks as uploadChunkFunc } from '@/utils/api/upload.js'
/**
 * Chunk — splits large files into 5MB slices, fingerprints them with
 * spark-md5, and uploads the slices with limited concurrency.
 */
const Chunk = class Chunk {
  constructor(options) {
    this.options = options;
    // Size of each upload slice: 5MB.
    this.CHUNK_SIZE = 5 * 1024 * 1024;
  }

  /**
   * Split a file into fixed-size slices.
   * @param {File|Blob} file
   * @param {number} size slice size in bytes (defaults to CHUNK_SIZE)
   * @returns {Array<{index: number, file: Blob}>} index is the slice's byte offset
   */
  createFileChunk = function (file, size = this.CHUNK_SIZE) {
    const chunks = [];
    let cur = 0;
    while (cur < file.size) {
      chunks.push({ index: cur, file: file.slice(cur, cur + size) });
      cur += size;
    }
    return chunks;
  };

  /**
   * Hash all chunks incrementally during browser idle time, updating
   * this.hashProgress as it goes.
   * NOTE(review): reads `this.chunks`, which is never assigned anywhere in
   * this class — the method appears unused; confirm before relying on it.
   * @returns {Promise<string>} full MD5 of the file
   */
  calculateHashIdle = async function calculateHashIdle() {
    const chunks = this.chunks;
    return new Promise((resolve) => {
      const spark = new sparkMD5.ArrayBuffer();
      let count = 0;
      const appendToSpark = async (file) => {
        return new Promise((resolve) => {
          const reader = new FileReader();
          reader.readAsArrayBuffer(file);
          reader.onload = (e) => {
            spark.append(e.target.result);
            resolve();
          };
        });
      };
      const workLoop = async (deadline) => {
        // Hash while the browser still has idle time in this frame.
        while (count < chunks.length && deadline.timeRemaining() > 1) {
          await appendToSpark(chunks[count].file);
          count++;
          if (count < chunks.length) {
            this.hashProgress = Number(((100 * count) / chunks.length).toFixed(2));
          } else {
            this.hashProgress = 100;
            resolve(spark.end());
          }
        }
        window.requestIdleCallback(workLoop);
      };
      window.requestIdleCallback(workLoop);
    });
  };

  /**
   * Fast sampled hash: the first 2MB window is hashed in full; every later
   * window contributes 2 bytes from its start, middle and end (the final
   * window is hashed in full). Much faster than a full MD5 for big files.
   * @param {File|Blob} files the file to fingerprint
   * @returns {Promise<string>} sampled MD5, used as the upload fileId
   */
  calculateHashSample = async function (files) {
    return new Promise((resolve) => {
      const spark = new sparkMD5.ArrayBuffer();
      const reader = new FileReader();
      const file = files;
      const size = file.size;
      const offset = 2 * 1024 * 1024;
      let chunks = [file.slice(0, offset)];
      let cur = offset;
      while (cur < size) {
        if (cur + offset >= size) {
          // last window: take it whole (slice clamps at file end)
          chunks.push(file.slice(cur, cur + offset));
        } else {
          // middle windows: sample 2 bytes at the start, middle and end
          const mid = cur + offset / 2;
          const end = cur + offset;
          chunks.push(file.slice(cur, cur + 2));
          chunks.push(file.slice(mid, mid + 2));
          chunks.push(file.slice(end - 2, end));
        }
        cur += offset;
      }
      reader.readAsArrayBuffer(new Blob(chunks));
      reader.onload = (e) => {
        spark.append(e.target.result);
        this.hashProgress = 100;
        resolve(spark.end());
      };
    });
  };

  /**
   * Slice, fingerprint, and upload a file in chunks.
   * @param {File} file
   * @param {Function} progress progress callback forwarded to sendRequest
   * @returns {Promise} resolves with the final chunk's response
   */
  uploadFileChunk = async function (file, progress) {
    const chunksAll = this.createFileChunk(file);
    const hash = await this.calculateHashSample(file);
    const chunks = chunksAll.map((chunk, index) => {
      const chunkNumber = index + 1; // server expects 1-based chunk numbers
      return {
        chunkNumber,
        chunkSize: this.CHUNK_SIZE,
        currentChunkSize: chunk.file.size,
        fileId: hash,
        fileName: file.name,
        multipartFile: chunk.file,
        totalChunks: chunksAll.length,
        totalSize: file.size,
      };
    });
    return this.uploadChunks(chunks, progress);
  };

  /**
   * Wrap each chunk descriptor in a FormData and send with concurrency 4.
   */
  uploadChunks = function (chunks, progress) {
    const requests = chunks.map((chunk) => {
      const form = new FormData();
      form.append('multipartFile', chunk.multipartFile);
      form.append('chunkNumber', chunk.chunkNumber);
      form.append('chunkSize', chunk.chunkSize);
      form.append('currentChunkSize', chunk.currentChunkSize);
      form.append('fileId', chunk.fileId);
      form.append('fileName', chunk.fileName);
      form.append('totalChunks', chunk.totalChunks);
      form.append('totalSize', chunk.totalSize);
      return { chunk: form, index: chunk.chunkNumber, error: 0 };
    });
    return this.sendRequest(requests, 4, progress);
  };

  /**
   * Upload tasks with a concurrency limit.
   * Fixes over the previous version: a failed request (network error or a
   * non-zero server code) now sets isStop and rejects with an Error, instead
   * of leaving the returned promise pending forever while other chunks keep
   * flying.
   * @param {Array<{chunk: FormData, index: number, error: number}>} chunks
   * @param {number} limit maximum number of parallel requests
   * @param {Function} progress called with the server-reported progress value
   */
  sendRequest = function (chunks, limit = 1, progress) {
    return new Promise((resolve, reject) => {
      const len = chunks.length;
      let counter = 0;
      let isStop = false;
      const start = () => {
        if (isStop) return;
        const task = chunks.shift();
        if (!task) {
          limit = -1;
          return;
        }
        uploadChunkFunc(task.chunk)
          .then((res) => {
            if (res.code == 0) {
              progress(res.data.progress);
              if (counter === len - 1) {
                // last chunk acknowledged — whole upload is done
                resolve(res);
              } else {
                counter++;
                start();
              }
            } else {
              // Server rejected the chunk: stop remaining work and surface it.
              isStop = true;
              reject(new Error("chunk upload failed, code=" + res.code));
            }
          })
          .catch((e) => {
            isStop = true;
            reject(e);
          });
      };
      while (limit > 0) {
        start();
        limit -= 1;
      }
    });
  };
};
// Singleton chunk uploader used by FileHandler.uploadFile.
export default new Chunk();
- 调用示例:在组件中使用 FileHandler 进行文件上传
import FileHandler from '@/utils/share/upload.js'
// Component upload handler: delegates to FileHandler.uploadFile, emits
// "progress" while uploading and "upload" with {url, name} on success.
upload (file) {
// NOTE(review): p is multiplied by 100 here, which assumes the uploader
// reports progress in the 0..1 range — confirm against res.data.progress.
FileHandler.uploadFile(this.path, file, p => {
let progress = Math.round(p * 100)
this.$emit("progress", progress)
}).then(res => {
const { objectUrl } = res.data;
const obj = {
url: objectUrl,
name: file.name
}
this.$emit('upload', obj)
}).catch((e)=>{
// Failure path: snap the bar to 100, show a toast, and clear the list.
this.$emit("progress", 100);
this.$message(`上传失败了请重试~`);
this.attachmentList = [];
})
},