Here I record the optimization journey: from complex to concise, from slow to fast.
FileReader
Most of the approaches found online use FileReader.
// FileReader reads File/Blob objects, not path strings like '../test.txt',
// so the file has to come from somewhere like an <input type="file">
const file = document.querySelector('input[type="file"]').files[0]
const reader = new FileReader()
let dataArr = []
reader.onload = (evt) => {
  const fileString = evt.target.result
  // split into lines once instead of re-splitting the whole string on every iteration
  const lines = fileString.trim().split(/\r?\n/)
  // start at 1 to skip the header line
  for (let index = 1; index < lines.length; index++) {
    const fileline = lines[index].split(",")[0]
    const filelineNumber = fileline.split(/\s+/).map(Number)
    dataArr.push(filelineNumber)
  }
}
reader.readAsText(file, "UTF-8")
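As a side note that is not part of the original post: FileReader's callback style can be wrapped in a Promise so the result can be awaited and reused. A minimal sketch, with readFileAsText as a hypothetical helper name:
// Sketch: Promise wrapper around FileReader; `readFileAsText` is a hypothetical name
function readFileAsText(file, encoding = "UTF-8") {
  return new Promise((resolve, reject) => {
    const reader = new FileReader()
    reader.onload = () => resolve(reader.result)   // resolve with the text content
    reader.onerror = () => reject(reader.error)    // surface read errors
    reader.readAsText(file, encoding)
  })
}
// usage: const fileString = await readFileAsText(file)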
ajax+FileReader
This doesn't feel fundamentally different from the approach above; both are slow.
urlToBlob(file_url) {
  return new Promise(function (resolve, reject) {
    let xhr = new XMLHttpRequest();
    xhr.open("get", file_url, true);
    // request the file as a Blob, then read it back out as text with FileReader
    xhr.responseType = "blob";
    xhr.onload = function () {
      if (this.status === 200) {
        const reader = new FileReader();
        reader.onload = function () {
          resolve(reader.result);
        };
        reader.readAsText(this.response);
      } else {
        reject(new Error('request failed: ' + this.status));
      }
    };
    xhr.send();
  });
},
this.urlToBlob('/static/test.txt').then(res => {
  const fileString = res
  // console.log('raw test file', res);
  // split into lines once, then pull the comma-separated fields out of each line
  const lines = fileString.trim().split(/\r?\n/)
  for (let index = 1; index < lines.length; index++) {
    const fields = lines[index].split(",")
    const fileline = fields[0]
    const lineValue = fields[1]
  }
})
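As a side note (not from the original post), the Blob + FileReader round trip isn't strictly necessary here: XMLHttpRequest can hand the text back directly via responseType "text". A minimal sketch under that assumption (urlToText is a hypothetical method name):
// Sketch: same request without the Blob + FileReader detour;
// `urlToText` is a hypothetical name, responseType "text" makes xhr.response a string
urlToText(file_url) {
  return new Promise(function (resolve, reject) {
    let xhr = new XMLHttpRequest();
    xhr.open("get", file_url, true);
    xhr.responseType = "text";
    xhr.onload = function () {
      if (this.status === 200) {
        resolve(this.response);
      } else {
        reject(new Error('request failed: ' + this.status));
      }
    };
    xhr.send();
  });
},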
fetch
In my tests this is the fastest and the most concise: with about 150,000 rows of data, processing finishes almost instantly.
// nothing has been stored yet, so fetch first, then store, then return
let resMesh = await fetch(txtUrl).then(res => res.text())
// parse the point file
let pointRows = resMesh.split("\n");
for (let i = 1; i < pointRows.length - 1; i++) {
  let point = pointRows[i];
  // split each row on its delimiter: use "," if the data is comma-separated, " " if space-separated
  let p = point.split(" ");
  // at this point p[0], p[1], p[2]... are the fields of the current row
}
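The comment at the top of this snippet hints at a fetch-then-store-then-return flow. Purely as a sketch of that idea (textCache and loadPointFile are hypothetical names, not part of the original code):
// Sketch of the "fetch first, then store, then return" idea hinted at above
const textCache = new Map()

async function loadPointFile(txtUrl) {
  if (textCache.has(txtUrl)) {
    return textCache.get(txtUrl)   // already stored: return the cached rows
  }
  const text = await fetch(txtUrl).then(res => res.text())
  // parse once: one array of fields per row, skipping the header row
  const rows = text.trim().split(/\r?\n/).slice(1).map(line => line.split(" "))
  textCache.set(txtUrl, rows)
  return rows
}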