import org.apache.commons.codec.digest.DigestUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* ClassName: Test_202
* Package: com.srr.media.minio
* Description: 大文件合并
*
* @Author srr
* @Create 2023/3/20 23:34
* @Version 1.0
*/
public class Test_202 {
    /**
     * Merges numbered chunk files from a directory into a single file, then
     * verifies the result by comparing its MD5 digest against the original file.
     *
     * <p>Chunk files are expected to be named with plain integers (e.g. {@code 0},
     * {@code 1}, {@code 2}...) and are appended in ascending numeric order.
     *
     * @param args unused
     * @throws IOException if the chunk directory is missing/empty, a stale merge
     *                     file cannot be deleted, or any read/write fails
     */
    public static void main(String[] args) throws IOException {
        // 1. Directory holding the chunk files.
        File chunkFolder = new File("C:\\Users\\Administrator\\Desktop\\chunk\\");
        // 2. Original file, used only for the MD5 comparison at the end.
        File originalFile = new File("C:\\Users\\Administrator\\Desktop\\尚硅谷NodeJS核心基础\\视频\\200_完结篇.mp4");
        // 3. Merge target. FIX: the original literal was missing the "C:" drive
        //    prefix used by every other path, so it resolved against the current
        //    drive instead of the intended location.
        File mergeFile = new File("C:\\Users\\Administrator\\Desktop\\200_完结篇---02.mp4");

        // 4. Remove a stale merge file from a previous run; fail loudly instead of
        //    silently appending after an ignored delete() failure.
        if (mergeFile.exists() && !mergeFile.delete()) {
            throw new IOException("Cannot delete existing merge file: " + mergeFile);
        }

        // 5. listFiles() returns null when the path is not a readable directory —
        //    guard instead of letting Arrays.asList throw an NPE.
        File[] fileArray = chunkFolder.listFiles();
        if (fileArray == null || fileArray.length == 0) {
            throw new IOException("No chunk files found in: " + chunkFolder);
        }

        // 6. Sort chunks by their numeric file name so they are appended in order.
        //    Integer.compare avoids the overflow risk of subtracting ints.
        List<File> fileList = Arrays.asList(fileArray);
        fileList.sort((a, b) -> Integer.compare(
                Integer.parseInt(a.getName()), Integer.parseInt(b.getName())));

        // 7. Append every chunk. try-with-resources closes the streams even if an
        //    I/O error occurs mid-merge (the original leaked on failure). Opening
        //    the target with "rw" creates it at position 0, so the explicit
        //    createNewFile()/seek(0) of the original are unnecessary.
        byte[] buffer = new byte[8192];
        try (RandomAccessFile rafWrite = new RandomAccessFile(mergeFile, "rw")) {
            for (File chunk : fileList) {
                // Chunks are only read — open them read-only, not "rw".
                try (RandomAccessFile rafRead = new RandomAccessFile(chunk, "r")) {
                    int len;
                    while ((len = rafRead.read(buffer)) != -1) {
                        rafWrite.write(buffer, 0, len);
                    }
                }
            }
        }

        // 8. Verify: merged file must hash identically to the original. IOExceptions
        //    propagate via the method's throws clause instead of being swallowed
        //    with printStackTrace() as before.
        try (FileInputStream originIs = new FileInputStream(originalFile);
             FileInputStream mergeIs = new FileInputStream(mergeFile)) {
            String originalMd5 = DigestUtils.md5Hex(originIs);
            String mergedMd5 = DigestUtils.md5Hex(mergeIs);
            if (originalMd5.equals(mergedMd5)) {
                System.out.println("合并文件成功");
            } else {
                System.out.println("合并文件失败..");
            }
        }
    }
}
// 断点续传_分块合并测试 (resumable-upload chunk-merge test)
// NOTE: the two trailing lines were scraped page residue (article title and
// publish-date footer) sitting outside the class body; converted to a comment
// so the file compiles.