文章目录
一、总体思路
1、前端以分片的形式计算出整个文件的 md5 值以及文件大小 size;
2、使用 md5、 size 去请求后台判断文件是否已经存在;
3、如果根据 md5 查询有数据,且数据大小与 size 一致,则文件已存在,此时跳到步骤 10
4、如果根据 md5 查询有数据,且数据大小与 size 不一致,则文件上传了一部分,此时返回主键 id 以及已经上传的文件分片索引,并跳到步骤 6
5、如果根据 md5 查询无数据,则未上传过此文件。此时向数据库中插入一条数据,仅保存 name、md5,并返回主键 id 作为分片文件的父 id;
6、前端将文件分片,遍历这些分片,进行文件分片上传。分片文件上传的同时,根据步骤 4 返回的文件分片索引,用以判断分片是否已经上传,若已上传则跳过,否则进行上传分片文件;
7、若上传失败,则保存失败的文件分片索引,最后进行重试;
8、文件分片上传全部结束,通知服务器进行合并;
9、合并结束,删除保存的分片临时文件以及数据库分片数据
10、结束!
二、数据库表结构
mysql> show create table file;
+-------+-------------------------------------------------------
| Table | Create Table
+-------+-------------------------------------------------------
| file | CREATE TABLE `file` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`patch_index` int(11) DEFAULT NULL,
`parent` int(11) DEFAULT NULL,
`name` varchar(64) NOT NULL,
`path` varchar(255) DEFAULT NULL,
`md5` varchar(255) NOT NULL,
`size` bigint(11) NOT NULL,
`create_time` datetime DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1179 DEFAULT CHARSET=utf8 |
+-------+-------------------------------------------------------
1 row in set (0.00 sec)
其中 patch_index 为分片上传顺序,合并文件时需要以此数据为准,否则文件可能打不开(我刚开始就想着直接用自增 id 作为合并文件的顺序。但是我上传了一个 1.37 GB 视频后,文件无法播放,因为在并发下,自增 id 的顺序和分片文件的顺序已经不一定一致了)
三、前端代码
页面代码
1、html 代码
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>分片上传</title>
<!-- Load jQuery over HTTPS so the page also works from an HTTPS origin (plain http:// is blocked as mixed content) -->
<script type="text/javascript" src="https://cdn.bootcss.com/jquery/1.12.4/jquery.min.js"></script>
<script type="text/javascript" src="/js/spark-md5.min.js"></script>
<script type="text/javascript" src="/js/patchUpload.js"></script>
</head>
<body>
<input id="file" type="file"/>
<br/><br/>
<button id="upload">分片上传</button>
<p id="process">0<span>%</span></p>
<button id="try" style="display: none">重试</button>
</body>
</html>
2、js 代码
var patchUpload = {
/**
 * Indices of chunks uploaded successfully. Pre-filled from the server
 * response when the file was already partially uploaded (resume case).
 */
succeed: [],
/**
 * Indices of chunks whose upload failed; consumed by tryAgain().
 */
failed: [],
/**
 * Remaining automatic retry rounds for failed chunks.
 */
try: 3,
/**
 * Chunk size: 5MB (kept below the server's multipart max-file-size limit).
 */
shardSize: 5 * 1024 * 1024,
/**
 * Entry point: install the page event handlers.
 */
init: function () {
this.setEvent();
},
/**
 * Wire up the "upload" and "retry" buttons.
 */
setEvent: function () {
var me = this;
$("#upload").click(function (e) {
var files = $("#file")[0].files;
if(files.length < 1) {
alert("请选择文件!");
return;
}
// Fresh upload: reset all bookkeeping before hashing the file.
me.succeed = [];
me.failed = [];
me.try = 3;
me.loadProcess(0);
me.md5checkUpload(files[0]);
});
$("#try").click(function (e) {
var files = $("#file")[0].files;
// Manual retry keeps succeed/failed so finished chunks are skipped,
// but restores the automatic-retry budget.
me.try = 3;
me.md5checkUpload(files[0]);
});
},
/**
 * Ask the server whether the file already exists, keyed by md5 + size.
 * status === 1: fully uploaded before -> instant finish.
 * status === 0 with id: partial upload -> resume, skipping returned chunk indices.
 * otherwise: register the file first (prepareUpload), then upload every chunk.
 * @param file the File selected by the user
 * @param md5 md5 of the whole file
 */
checkUpload: function (file, md5) {
var me = this;
$.ajax({
url: "/file/exists",
type: "get",
data: {md5: md5, size: file.size},
dataType: "json",
success: function(data) {
if (data.status === 1) {
me.loadProcess(100);
alert("急速秒传!");
return ;
}
if(data.id && data.status === 0) {
// Resume: server tells us which chunk indices are already stored.
me.succeed = data.patchIndex;
me.upload(data.id, file);
return ;
}
// Unknown file: create the parent record (synchronous call), then upload.
me.upload(me.prepareUpload(md5, file), file);
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert("服务器错误!");
}
});
},
/**
 * File not seen before: insert its basic info so the server returns a
 * primary key that chunk uploads use as their parent id.
 * NOTE(review): async:false blocks the UI thread; kept because upload()
 * needs the id before it can start.
 * @param md5 md5 of the whole file
 * @param file the File being uploaded
 * @returns {*} the new record id, or undefined on failure
 */
prepareUpload: function (md5, file) {
var id;
$.ajax({
url: "/file/new",
type: "post",
async: false,
data: JSON.stringify({name: file.name, md5: md5, size: 0}),
contentType: "application/json;charset=utf-8",
dataType: "json",
success: function(data) {
if(data && data.id) {
id = data.id;
return;
}
alert("上传文件失败!");
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert("服务器错误!");
}
});
return id;
},
/**
 * Upload the file chunk by chunk, skipping chunks already recorded in
 * succeed (unless they are also marked failed).
 * @param id parent record id returned by the server
 * @param file the File to upload
 */
upload: function (id, file) {
var me = this;
if(!id) return;
var shardCount = Math.ceil(file.size / this.shardSize);
for (var i = 0; i < shardCount; i++) {
if(me.succeed.length !== 0 && me.succeed.indexOf(i) > -1 && me.failed.indexOf(i) === -1) {
continue;
}
this.uploadPatch(id, file, i, shardCount);
}
},
/**
 * Read one chunk, md5 it, and POST it as multipart form data.
 * On success updates progress and lets mergePatch() decide whether to
 * finish; on failure records the index and triggers tryAgain().
 * NOTE(review): readAsBinaryString and jQuery's .load() event alias are
 * deprecated APIs; they work with jQuery 1.12 but should be migrated to
 * readAsArrayBuffer + reader.onload.
 * @param parent parent record id
 * @param file the whole File
 * @param index zero-based chunk index
 * @param shardCount total number of chunks
 */
uploadPatch: function (parent, file, index, shardCount) {
var me = this;
var start = index * this.shardSize;
var end = Math.min(file.size, start + this.shardSize);
var patch = file.slice(start, end);
var spark = new SparkMD5();
var reader = new FileReader();
reader.readAsBinaryString(patch);
$(reader).load(function (e) {
spark.appendBinary(e.target.result);
var md5 = spark.end();
var form = new FormData();
form.append("index", index);
form.append("parent", parent);
form.append("md5", md5);
form.append("size", patch.size);
form.append("patch", patch);
form.append("name", file.name + "-patch-" + index);
$.ajax({
url: "/file/patch/upload",
type: "post",
data: form,
processData: false,
contentType: false,
dataType: "json",
success: function(data) {
if(!data || !data.ok) {
me.failed.push(index);
console.log("上传分片" + index + "失败!");
return ;
}
me.succeed.push(index);
console.log("上传分片" + index + "成功!");
// Approximate progress: treats every finished chunk except the
// current one as exactly shardSize bytes.
me.loadProcess(((me.succeed.length - 1) * me.shardSize + patch.size) / file.size * 100);
me.mergePatch(parent, file, shardCount);
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
me.failed.push(index);
console.log("服务器错误,上传分片" + index + "失败!");
me.tryAgain(parent, file, shardCount);
}
});
});
},
/**
 * When every chunk is accounted for and none failed, ask the server to
 * merge the chunks into the final file; otherwise fall back to retrying.
 * @param parent parent record id
 * @param file the whole File (size is sent for server-side validation)
 * @param shardCount total number of chunks
 */
mergePatch: function (parent, file, shardCount) {
var me = this;
// Not all chunk requests have completed yet — wait for the last one.
if(me.succeed.length + me.failed.length !== shardCount) return;
if(me.failed.length !== 0) {
me.tryAgain(parent, file, shardCount);
return ;
}
$.ajax({
url: "/file/patch/merge",
type: "post",
data: {parent: parent, size: file.size},
dataType: "json",
success: function(data) {
if (data && data.ok) {
me.loadProcess(100);
alert("上传文件成功!");
return ;
}
alert("上传文件失败!");
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert("服务器错误!");
}
});
},
/**
 * Re-upload every failed chunk, for at most `try` automatic rounds;
 * after that, reveal the manual "retry" button.
 */
tryAgain: function (parent, file, shardCount) {
var me = this;
if(me.succeed.length + me.failed.length !== shardCount) return;
if(me.failed.length === 0) {
me.mergePatch(parent, file, shardCount);
return ;
}
if(me.try === 0) {
$("#try").css("display", "block");
return ;
}
me.try--;
console.log("重试...");
// Drain the failed list; uploadPatch pushes indices back on renewed failure.
while(me.failed.length !== 0) {
me.uploadPatch(parent, file, me.failed.pop(), shardCount);
}
},
/**
 * Render the progress percentage (clamped to 100); hides the retry
 * button once complete.
 * @param process percentage in [0, 100]
 */
loadProcess: function (process) {
process = Math.min(100, process);
if(process === 100) {
$("#try").css("display", "none");
}
$("#process").html(process.toFixed(2) + '<span>%</span>');
},
/**
 * Compute the md5 of the whole file by reading it chunk by chunk
 * (incremental SparkMD5 over ArrayBuffers), then start checkUpload().
 * @param file the File to hash
 */
md5checkUpload: function (file) {
var me = this;
var index = 0;
var shardCount = Math.ceil(file.size / this.shardSize);
var spark = new SparkMD5.ArrayBuffer();
var fileReader = new FileReader();
var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
fileReader.onload = function (e) {
index++;
spark.append(e.target.result);
if(index < shardCount) {
loadNext();
return;
}
// All chunks hashed — hand the final digest to the existence check.
me.checkUpload(file, spark.end());
};
// Read the next chunk; the onload handler above decides whether to continue.
function loadNext() {
var start = index * me.shardSize;
var end = Math.min(start + me.shardSize, file.size);
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
loadNext();
}
};
// Initialize the uploader once the DOM is ready.
$(document).ready(function () {
    patchUpload.init();
});
四、后端代码
普通的 springboot 项目,pom.xml 代码就不贴了
1、application.yml
spring:
datasource:
druid:
driver-class-name: com.mysql.jdbc.Driver
url: jdbc:mysql://localhost:3306/test?useSSL=false
username: root
password: password
initial-size: 5
min-idle: 5
max-active: 100
keep-alive: true
min-evictable-idle-time-millis: 28740000
max-evictable-idle-time-millis: 28800000
servlet:
multipart:
max-file-size: 10MB
max-request-size: 100MB
mybatis:
configuration:
lazy-loading-enabled: true
aggressive-lazy-loading: false
cache-enabled: true
map-underscore-to-camel-case: true
logging:
level:
com.kfyty.mybatis.auto.mapper.handle: info
com.kfyty.upload.mapper: debug
2、实体类(自己写的代码生成器生成的)
package com.kfyty.upload.pojo;
import java.util.Date;
import lombok.Data;
/**
 * TABLE_NAME: file
 * TABLE_COMMENT:
 *
 * Entity for the `file` table. A row is either a parent file record or a
 * single uploaded chunk (chunk rows carry a non-null parent reference).
 *
 * By kfyty
 */
@Data
public class FilePojo {
/**
 * Primary key (auto-increment).
 */
private Integer id;
/**
 * Chunk order within the file; merging must follow this order rather than
 * the auto-increment id, which is not ordered under concurrent uploads.
 */
private Integer patchIndex;
/**
 * Id of the parent file record this chunk belongs to.
 */
private Integer parent;
/**
 * File name; chunk rows use "&lt;fileName&gt;-patch-&lt;index&gt;".
 */
private String name;
/**
 * Absolute path of the stored file on disk.
 */
private String path;
/**
 * md5 digest: whole-file md5 for parent rows, chunk md5 for chunk rows.
 */
private String md5;
/**
 * Size in bytes.
 */
private Long size;
/**
 * Row creation time (defaults to CURRENT_TIMESTAMP in the schema).
 */
private Date createTime;
public FilePojo() {
}
public FilePojo(Integer patchIndex, Integer parent, String name, String path, String md5, Long size) {
this.patchIndex = patchIndex;
this.parent = parent;
this.name = name;
this.path = path;
this.md5 = md5;
this.size = size;
}
}
3、一个 vo(帮助理解 js)
package com.kfyty.upload.vo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class FileExists {
/**
 * Id of the file record (null when the file does not exist yet).
 */
private Integer id;
/**
 * Existence status:
 * -1: not uploaded at all
 *  1: fully uploaded (client finishes instantly)
 *  0: partially uploaded (client resumes)
 */
private Integer status;
/**
 * Indices of chunks already uploaded; only populated for status 0.
 */
private List<Integer> patchIndex;
/** Factory: the file has never been uploaded. */
public static FileExists nonExistent() {
return new FileExists(null, -1, null);
}
/** Factory: the file is already fully uploaded. */
public static FileExists exists(Integer id) {
return new FileExists(id, 1, null);
}
/** Factory: a partial upload exists; resume with the given chunk indices. */
public static FileExists partExistent(Integer id, List<Integer> patchIndex) {
return new FileExists(id, 0, patchIndex);
}
}
4、Mapper 接口(使用了自己写的一个扩展包,使用一个注解就能查询,所以没有 Mapper.xml 文件)
package com.kfyty.upload.mapper;
import com.kfyty.mybatis.auto.mapper.BaseMapper;
import com.kfyty.mybatis.auto.mapper.annotation.AutoMapper;
import com.kfyty.mybatis.auto.mapper.annotation.SelectKey;
import com.kfyty.upload.pojo.FilePojo;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
@Mapper
@SelectKey
@AutoMapper(entity = FilePojo.class)
public interface FileMapper extends BaseMapper<Integer, FilePojo> {
/** Set the final path and size on the record with the given id (after merge). */
@AutoMapper
int updateByIdSetPathAndSize(@Param("id") Integer id, @Param("path") String path, @Param("size") Long size);
/** Delete all chunk rows whose parent equals the given file id. */
@AutoMapper
int deleteByParent(@Param("parent") Integer parent);
/** List the chunk indices already stored for the given parent file id. */
@AutoMapper
List<Integer> findPatchIndexByParent(@Param("parent") Integer parent);
/** Find a file record by whole-file md5. */
@AutoMapper
FilePojo findByMd5(@Param("md5") String md5);
/** Find a chunk row by parent id and chunk md5 (used for idempotent chunk uploads). */
@AutoMapper
FilePojo findByParentAndMd5(@Param("parent") Integer parent, @Param("md5") String md5);
/** List all chunk rows of a parent ordered by patch_index ascending (merge order). */
@AutoMapper
List<FilePojo> findByParentOrderByPatchIndexAsc(@Param("parent") Integer parent);
}
5、上传文件的工具类
package com.kfyty.upload.utils;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.UUID;
public abstract class UploadUtil {
    /** Root directory under which uploads are stored, partitioned by date. */
    private static final String SAVE_PATH = "D:/temp/patch/";

    /**
     * Build today's storage directory path: SAVE_PATH/yyyy-MM-dd/.
     */
    public static String initPath() {
        String date = new SimpleDateFormat("yyyy-MM-dd").format(new Date());
        return SAVE_PATH + date + "/";
    }

    /**
     * Extract the file extension including the leading dot, or "" when the
     * name contains no dot.
     */
    public static String parseFileType(String fileName) {
        return !fileName.contains(".") ? "" : fileName.substring(fileName.lastIndexOf("."));
    }

    /**
     * Persist an uploaded chunk under today's directory with a random UUID
     * name, after validating that the received byte count matches the
     * declared size.
     *
     * @param source the uploaded multipart chunk
     * @param size   the size the client claims to have sent, in bytes
     * @return absolute path of the stored file
     * @throws IOException      on storage failure
     * @throws RuntimeException on size mismatch or directory-creation failure
     */
    public static String saveFile(MultipartFile source, Long size) throws IOException {
        if (source.getSize() != size) {
            throw new RuntimeException("上传字节数与接收字节数不符!");
        }
        File dir = new File(initPath());
        if (!dir.exists() && !dir.mkdirs()) {
            throw new RuntimeException("创建文件夹失败!");
        }
        String fileType = parseFileType(source.getOriginalFilename());
        // Retry on the (unlikely) UUID collision until an unused name is found.
        while (true) {
            String saveFileName = UUID.randomUUID() + fileType.toLowerCase();
            File saveFilePath = new File(dir.getPath(), saveFileName);
            if (saveFilePath.exists()) {
                continue;
            }
            source.transferTo(saveFilePath);
            return saveFilePath.getAbsolutePath();
        }
    }

    /**
     * Concatenate the given chunk files (in list order) into one new file
     * under today's directory, deleting each chunk after it is copied.
     *
     * Fixes over the original: channels are closed via try-with-resources
     * even when an I/O error occurs (the original leaked them), and
     * transferTo is looped because a single call may transfer fewer bytes
     * than requested.
     *
     * @param fileType  extension (including dot) for the merged file
     * @param fileNames absolute paths of the chunk files, already in merge order
     * @return absolute path of the merged file
     * @throws IOException on any I/O failure
     */
    public static String mergeFile(String fileType, List<String> fileNames) throws IOException {
        File dir = new File(initPath());
        if (!dir.exists() && !dir.mkdirs()) {
            throw new RuntimeException("创建文件夹失败!");
        }
        File saveFilePath = null;
        do {
            String saveFileName = UUID.randomUUID() + fileType.toLowerCase();
            saveFilePath = new File(dir.getPath(), saveFileName);
        } while (saveFilePath.exists());
        try (FileChannel out = new FileOutputStream(saveFilePath).getChannel()) {
            for (String fileName : fileNames) {
                File patch = new File(fileName);
                try (FileChannel in = new FileInputStream(patch).getChannel()) {
                    long position = 0;
                    long count = in.size();
                    // transferTo may move fewer bytes than requested; loop until done.
                    while (position < count) {
                        position += in.transferTo(position, count - position, out);
                    }
                }
                patch.delete();
            }
        }
        return saveFilePath.getAbsolutePath();
    }
}
6、启动类和控制器
package com.kfyty.upload;
import com.kfyty.upload.mapper.FileMapper;
import com.kfyty.upload.pojo.FilePojo;
import com.kfyty.upload.utils.UploadUtil;
import com.kfyty.upload.vo.FileExists;
import com.kfyty.upload.vo.Result;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
@Slf4j
@RestController
@SpringBootApplication
public class UploadApplication {

    @Autowired
    private FileMapper fileMapper;

    /**
     * Register a new file before chunk uploads begin: insert name/md5 and
     * return the entity carrying its generated id (the parent id for chunks).
     */
    @PostMapping("/file/new")
    public FilePojo uploadFile(@RequestBody FilePojo file) {
        fileMapper.insert(file);
        return file;
    }

    /**
     * Existence check keyed by whole-file md5 and size.
     * Not found -> status -1; size matches -> status 1 (instant finish);
     * otherwise -> status 0 plus the chunk indices already stored.
     */
    @GetMapping("/file/exists")
    public FileExists fileExists(String md5, Long size) {
        FilePojo file = fileMapper.findByMd5(md5);
        if (file == null) {
            return FileExists.nonExistent();
        }
        if (file.getSize().equals(size)) {
            return FileExists.exists(file.getId());
        }
        return FileExists.partExistent(file.getId(), fileMapper.findPatchIndexByParent(file.getId()));
    }

    /**
     * Store one uploaded chunk. Idempotent: a chunk already stored with the
     * same (parent, md5) and a matching size is acknowledged without being
     * rewritten; a stale record with a mismatched size is replaced.
     */
    @PostMapping("/file/patch/upload")
    public Result filePatchExists(String name, Integer index, Integer parent, String md5, Long size, MultipartFile patch) throws IOException {
        FilePojo file = fileMapper.findByParentAndMd5(parent, md5);
        if (file == null || !file.getSize().equals(size)) {
            // Drop any stale/incomplete record before saving the chunk again.
            Optional.ofNullable(file).ifPresent(e -> fileMapper.deleteByPk(e.getId()));
            fileMapper.insert(new FilePojo(index, parent, name, UploadUtil.saveFile(patch, size), md5, size));
            return Result.OK();
        }
        // Reaching here implies file != null and the sizes match, so the chunk
        // is already stored. (The original `file.getSize().equals(size) ? OK()
        // : FAIL()` ternary was dead code: its FAIL branch was unreachable.)
        return Result.OK();
    }

    /**
     * Merge all chunks of a parent into the final file. Validates that the
     * summed chunk sizes equal the expected total; on mismatch the chunk
     * rows are deleted so the client can re-upload from scratch.
     * NOTE(review): the merge file I/O runs inside the DB transaction but is
     * itself not transactional — a failure after merging can leave orphan
     * files on disk; confirm cleanup expectations.
     */
    @Transactional
    @PostMapping("/file/patch/merge")
    public Result filePatchMerge(Integer parent, Long size) throws IOException {
        FilePojo fileInfo = fileMapper.findByPk(parent);
        List<FilePojo> patchs = fileMapper.findByParentOrderByPatchIndexAsc(parent);
        Long total = patchs.stream().mapToLong(FilePojo::getSize).sum();
        if (fileInfo == null || CollectionUtils.isEmpty(patchs) || !total.equals(size)) {
            fileMapper.deleteByParent(parent);
            log.warn("total: {}, require size: {}, and delete file to retry !", total, size);
            return Result.FAIL();
        }
        String fileType = UploadUtil.parseFileType(fileInfo.getName());
        // Chunks are merged in patch_index order — see findByParentOrderByPatchIndexAsc.
        String path = UploadUtil.mergeFile(fileType, patchs.stream().map(FilePojo::getPath).collect(Collectors.toList()));
        fileMapper.updateByIdSetPathAndSize(parent, path, total);
        fileMapper.deleteByParent(parent);
        return Result.OK();
    }

    public static void main(String[] args) {
        ConfigurableApplicationContext run = SpringApplication.run(UploadApplication.class, args);
    }
}
7、控制器中用到的一个辅助类
package com.kfyty.upload.vo;
import lombok.Data;
import lombok.AllArgsConstructor;
import java.util.function.Predicate;
@Data
@AllArgsConstructor
public class Result<T> {
/** Whether the operation succeeded. */
private boolean ok;
/** Optional payload returned to the client (may be null). */
private T data;
public Result() {
}
public Result(boolean ok) {
this(ok, null);
}
public Result(T data) {
this(true, data);
}
/** Success result without payload. NOTE(review): returns the raw type. */
public static Result OK() {
return new Result(true);
}
/** Failure result without payload. NOTE(review): returns the raw type. */
public static Result FAIL() {
return new Result(false);
}
/** Wrap o as a success when the predicate accepts it, otherwise FAIL. */
public static Result test(Object o, Predicate<Object> p) {
return p.test(o) ? new Result<>(o) : FAIL();
}
}
五、demo 结构
六、完结撒花
。。。。。。。。。。。。。。。。。。。。。。。。。。。
PS:Mapper 接口中用到的扩展包可以去这里下载安装:
https://github.com/kfyty/mybatis-auto-mapper
转载自:https://blog.csdn.net/kfyty725/article/details/104848966