Large File Chunked Upload - Vue 3.0

Chunked upload of large files (also called multipart or sliced upload) is an effective way to handle large file uploads: the file is split into a number of smaller pieces (chunks), each chunk is uploaded to the server separately, and the server finally merges the chunks back into the original file. The main steps and points to watch out for are described below.

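As a rough illustration of that final merge step, a server-side sketch in Node.js could look like the following. The directory layout and the <index>_<name> chunk naming mirror the front-end code further down; everything else (paths, error handling) is an assumption for illustration, not part of this post's component.

// server-merge-sketch.js - a minimal sketch of merging uploaded chunks on the server.
// Assumes each chunk was saved under <uploadDir>/<hash>/<index>_<fileName>.
import fs from 'node:fs';
import path from 'node:path';

export function mergeChunks(uploadDir, hash, fileName) {
    const chunkDir = path.join(uploadDir, hash);
    // Chunk files are named "<index>_<fileName>", so sort them by their numeric index.
    const chunkFiles = fs.readdirSync(chunkDir)
        .sort((a, b) => Number(a.split('_')[0]) - Number(b.split('_')[0]));

    const target = path.join(uploadDir, fileName);
    const writeStream = fs.createWriteStream(target);
    for (const chunk of chunkFiles) {
        // Append each chunk to the target file in order.
        writeStream.write(fs.readFileSync(path.join(chunkDir, chunk)));
    }
    writeStream.end();
    // Integrity check: re-hash the merged file here and compare with the client-supplied hash.
    return target;
}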
Core flow of chunked upload

🌟  Decide on a chunk size

🌟  Slice the file (File.slice())

🌟  Upload the chunks with concurrency control (a small sketch follows this list)

🌟  Verify file integrity

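The component below simply fires every chunk request at once through Promise.all; if you want to actually limit how many chunk requests are in flight, a small helper along these lines could be used instead (the limit of 3 is an arbitrary assumption):

// A minimal sketch of concurrency-limited chunk uploads (not part of the component below).
async function uploadWithConcurrency(tasks, limit = 3) {
    const results = [];
    let next = 0;
    // Start `limit` workers; each worker keeps pulling the next pending task until none are left.
    const workers = Array.from({length: Math.min(limit, tasks.length)}, async () => {
        while (next < tasks.length) {
            const index = next++;
            results[index] = await tasks[index]();
        }
    });
    await Promise.all(workers);
    return results;
}

// Usage: pass functions that each start one chunk upload, e.g.
// await uploadWithConcurrency(formDataList.map(fd => () => props.onUploadChunk(fd)));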

Implementation based on Vue 3.0

<template>
    <el-upload
        v-model:file-list="fileList"
        v-bind="getAttrs($attrs)"
        :on-success="onSuccess"
        :on-error="onError"
        :on-progress="onProgress"
    >
        <template #trigger>
            <el-button type="primary">Select file</el-button>
        </template>
    </el-upload>
    <el-progress :percentage="percentage ?? 0" v-if="percentage > 0"></el-progress>
</template>

<script setup>
import {computed, reactive} from 'vue';
import SparkMD5 from 'spark-md5';
// import {ApiInitUpload,  ApiUploadChunk, ApiUploadFinish} from './api';
import {CONST} from './config';
defineOptions({
    name: 'PlUpload'
});
const emit = defineEmits(['update:modelValue']);

const props = defineProps({
    uploadMode: {
        type: String,
        default: 'default',
    },
    chunkSize: {
        type: Number,
        default: () => 5 * 1024 * 1024,
    },
    data: {
        type: Object,
        default: () => {
            return {};
        },
    },
    limit: {
        type: Number,
    },
    modelValue: {
        type: Array,
    },
    onSuccess: {
        type: Function,
    },
    onError: {
        type: Function,
    },
    onProgress: {
        type: Function,
    },
    onUploadInit: {
        type: Function,
    },
    onUploadFinish: {
        type: Function,
    },
    onUploadChunk: {
        type: Function,
    },
});

const uploadProgress = reactive({
    progress: [],
    total: 0,

});

const fileList = computed({
    get: () => props.modelValue,
    set: (val) => {
        emit('update:modelValue', val);
    },
});

async function onUpload(response) {
    // Custom http-request handler for el-upload: slice the file, hash it, then upload the chunks
    const chunkList = getChunkList(response.file, props.chunkSize);
    const hash = await getChunkHash(chunkList);
    uploadProgress.total = chunkList.length;

    init(hash);
    uploadChunks(chunkList, hash, response.file.name);
}
function getChunkHash(chunkList) {
    // Use the ArrayBuffer variant of SparkMD5, since chunks are read as ArrayBuffers
    const spark = new SparkMD5.ArrayBuffer();
    const fileReader = new FileReader();
    let currentChunk = 0;

    return new Promise((resolve, reject) => {
        fileReader.onload = (e) => {
            // Append the chunk that was just read, then move on or finish
            spark.append(e.target.result);
            currentChunk++;
            if (currentChunk < chunkList.length) {
                fileReader.readAsArrayBuffer(chunkList[currentChunk]);
            }
            else {
                resolve(spark.end());
            }
        };
        fileReader.onerror = reject;

        fileReader.readAsArrayBuffer(chunkList[currentChunk]);
    });
}
function getChunkList(file, chunkSize) {
    // slice() with vendor-prefixed fallbacks for older browsers
    const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
    const chunks = Math.ceil(file.size / chunkSize);
    const chunkList = [];

    for (let i = 0; i < chunks; i++) {
        chunkList.push(blobSlice.call(file, i * chunkSize, (i + 1) * chunkSize));
    }

    return chunkList;
}
// Notify the backend that the upload is starting (e.g. to create a folder for the chunks)
function init(hash) {
    props.onUploadInit?.({hash});
}
// Upload every chunk
function uploadChunks(chunkList, hash, name) {
    const requestList = chunkList.map((item, index) => {
        const formData = new FormData();
        formData.append('file', item);
        formData.append('hash', hash);
        formData.append('file_name', `${index}_${name}`);
        // Append any extra upload parameters
        for (const key in props.data ?? {}) {
            formData.append(key, props.data[key]);
        }
        // return ApiUploadChunk(formData, arg => onUploadProgress(arg, index));
        return props.onUploadChunk?.(formData, arg => onUploadProgress(arg, index));
    });
    Promise.all(requestList).then(res => {
        uploadFinish(hash);
    }).catch(err => {
        hubEvent(CONST.ON_ERROR, err);
        throw err;
    });
}

function onUploadProgress(payload, index) {
    uploadProgress.progress[index] = payload.progress;
    hubEvent(CONST.ON_PROGRESS);
};

// All chunks uploaded: ask the backend to merge them and verify file integrity
async function uploadFinish(hash) {
    try {
        const data = await props.onUploadFinish?.({hash});
        hubEvent(CONST.ON_SUCCESS, data);
    }
    catch (err) {
        hubEvent(CONST.ON_ERROR, err);
        throw err;
    }
}

const percentage = computed(() => {
    const {progress, total} = uploadProgress;
    const count = progress.reduce((sum, current) => sum + (current ?? 0), 0);
    // Guard against division by zero before the chunk count is known
    return total ? Number(((count / total) * 100).toFixed()) : 0;
});

// utils
// Invoke the matching callback prop (onSuccess / onError / onProgress), if it was provided
function hubEvent(name, data) {
    const event = props[name];
    if (event) {
        event(data);
    }
}

function getAttrs(attrs) {
    const newAttrs = {
        ...attrs,
        [CONST.HTTP_REQUEST]: onUpload,
    };
    // In non-chunk mode, fall back to el-upload's default request and cap the file count at 1
    if (props.uploadMode !== CONST.MODE_CHUNK) {
        Reflect.deleteProperty(newAttrs, CONST.HTTP_REQUEST);
        Reflect.set(newAttrs, CONST.LIMIT, 1);
    }
    return newAttrs;
}
</script>
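
A hedged usage sketch of the component follows; the ApiInitUpload / ApiUploadChunk / ApiUploadFinish wrappers and their URLs are placeholders that mirror the commented-out ./api import above, not a fixed backend contract.

<template>
    <PlUpload
        v-model="files"
        action="#"
        upload-mode="chunk"
        :data="{bizType: 'demo'}"
        :on-upload-init="ApiInitUpload"
        :on-upload-chunk="ApiUploadChunk"
        :on-upload-finish="ApiUploadFinish"
        :on-success="handleSuccess"
    />
</template>

<script setup>
import {ref} from 'vue';
import axios from 'axios';
import PlUpload from './PlUpload.vue';

const files = ref([]);
const handleSuccess = data => console.log('upload done', data);

// Hypothetical API wrappers; adjust URLs and payloads to your backend
const ApiInitUpload = ({hash}) => axios.post('/upload/init', {hash});
const ApiUploadChunk = (formData, onProgress) =>
    axios.post('/upload/chunk', formData, {
        // Map axios progress events onto the {progress} shape the component expects
        onUploadProgress: e => onProgress({progress: e.total ? e.loaded / e.total : 0}),
    });
const ApiUploadFinish = ({hash}) => axios.post('/upload/finish', {hash});
</script>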

Configuration file

/**
 * config
 */
export const CONST = {
    MODE_CHUNK: 'chunk',
    // attribute keys forwarded to el-upload
    LIMIT: 'limit',
    HTTP_REQUEST: 'http-request',
    // callback prop names looked up by hubEvent (must match the camelCase prop names)
    ON_SUCCESS: 'onSuccess',
    ON_ERROR: 'onError',
    ON_PROGRESS: 'onProgress',
};

Glossary

SparkMD5: generates a unique hash for the file, used to verify file integrity

File.prototype.slice: used to split the file into chunks

onUploadInit: called when the upload is initialised, so the backend can create a folder to hold the chunk files

onUploadChunk: the chunk-upload API call

onUploadFinish: called after all chunks are uploaded, asking the backend to merge them and verify integrity

Web Worker: move the hashing to a separate thread to improve performance (a sketch follows)

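To illustrate the Web Worker point, the hashing could be moved off the main thread roughly like this; hash.worker.js and the /libs/spark-md5.min.js path are assumptions, and the worker simply mirrors getChunkHash.

// hash.worker.js - runs MD5 hashing in a Web Worker so the UI thread is not blocked
// Load a local copy of spark-md5 (path is an assumption)
importScripts('/libs/spark-md5.min.js');

self.onmessage = async (e) => {
    const chunkList = e.data; // array of Blob chunks posted from the main thread
    const spark = new SparkMD5.ArrayBuffer();
    for (const chunk of chunkList) {
        // Blob.arrayBuffer() is available in workers; append each chunk in order
        spark.append(await chunk.arrayBuffer());
    }
    self.postMessage(spark.end());
};

// In the component, getChunkHash could then be replaced by:
// function getChunkHash(chunkList) {
//     return new Promise(resolve => {
//         const worker = new Worker('/hash.worker.js');
//         worker.onmessage = event => resolve(event.data);
//         worker.postMessage(chunkList);
//     });
// }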