剪辑音频 (Audio clipping: waveform preview, drag-to-select region, WAV export & upload)
<div v-if="waveformFlag" id="waveform"></div>
handelReaderNew(){
var that = this
console.log("handelReaderNew")
that.wavesurfer = this.$WaveSurfer.create({
container: '#waveform',
waveColor: 'violet',
progressColor: 'purple'
});
if(that.isTailor){
that.wavesurfer.load(this.videoForm.audioPath);
}else{
let url = URL.createObjectURL(that.tailorMusiceFile);
console.log(url,"转base64")
that.wavesurfer.load(url);
}
that.wavesurfer.on('ready', function () {
that.wavesurfer.play();
});
const wsRegions = that.wavesurfer.registerPlugin(this.$RegionsPlugin.create())
const random = (min, max) => Math.random() * (max - min) + min
const randomColor = () => `rgba(${random(0, 255)}, ${random(0, 255)}, ${random(0, 255)}, 0.5)`
console.log(wsRegions,"this")
that.wavesurfer.on('decode', function () {
wsRegions.addRegion({
start: 0,
end: 20,
content: '截取部分',
color: 'rgba(193, 114, 245, 0.5)',
})
});
let loop = true
that.wavesurfer.on('click', (relativeX) => {
that.wavesurfer.playPause()
loop = false
console.log('Click点击的位置',that.wavesurfer, relativeX)
})
var activeRegion = null
wsRegions.on('region-in', (region) => {
console.log('region-in')
activeRegion = region
})
wsRegions.on('region-out', (region) => {
console.log('region-out')
if (activeRegion === region) {
if (loop) {
region.play()
} else {
activeRegion = null
}
}
})
wsRegions.on('region-clicked', (region, e) => {
console.log('点击区域',region,e)
loop = true
e.stopPropagation()
activeRegion = region
region.play()
region.setOptions({ color: 'rgba(193, 114, 245, 0.5)' })
})
wsRegions.on('region-updated', (region) => {
that.tailorStartTime = region.start
that.tailorendTime = region.end
console.log('范围移动', region)
})
wsRegions.once('decode', () => {
console.log(1310)
})
wsRegions.on('interaction', () => {
console.log('interaction')
activeRegion = null
})
},
fileupload(){
this.handelDestroy()
this.waveformFlag = false
this.waveformFlag = true
var file = this.musiceFile;
let that = this
var reader = new FileReader();
reader.onload = function (e){
console.log(e,"文件1273")
var arrBuffer = event.target.result;
var audioCtx = new AudioContext();
audioCtx.decodeAudioData(arrBuffer, function(audioBuffer) {
let channels = audioBuffer.numberOfChannels;
let rate = audioBuffer.sampleRate;
let startOffset = rate * that.tailorStartTime;
let endOffend = rate * that.tailorendTime;
let frameCount = endOffend - startOffset;
console.log('截取起点:',startOffset,'截取终点:',endOffend,that.tailorendTime,'截取时长:',frameCount,"截取声道数:",channels)
let newAudioBuffer = new AudioContext().createBuffer(channels, frameCount, rate);
let tempArray = new Float32Array(frameCount);
let offSet = 0;
for (let index = 0; index < channels; index++) {
audioBuffer.copyFromChannel(tempArray, index, startOffset);
newAudioBuffer.copyToChannel(tempArray, index, offSet);
}
console.log(audioBuffer,newAudioBuffer)
var myArrayBuffer = audioCtx.createBuffer(2, frameCount, audioCtx.sampleRate);
var anotherArray = new Float32Array();
myArrayBuffer.copyFromChannel(anotherArray,1,0);
let num = Math.trunc(frameCount)
var blob = that.bufferToWave(newAudioBuffer, num);
that.tailorMusiceFile = blob
that.isTailor = false
that.handelReaderNew()
that.tailorMusiceFile = new File([blob],'muice.mp3',{type:blob.type})
console.log(newAudioBuffer,audioBuffer,blob,that.tailorMusiceFile,)
that.handleHttpRequestFucNew({file:that.tailorMusiceFile},2)
});
}
reader.readAsArrayBuffer(file)
},
getImageBase64(blob) {
return new Promise((resolve,reject) => {
const reader = new FileReader();
reader.readAsDataURL(blob);
reader.onload = () => {
const base64 = reader.result;
resolve(base64);
}
reader.onerror = error => reject(error);
});
},
bufferToWave(abuffer, len) {
var numOfChan = abuffer.numberOfChannels,
length = len * numOfChan * 2 + 44,
buffer = new ArrayBuffer(length),
view = new DataView(buffer),
channels = [], i, sample,
offset = 0,
pos = 0;
setUint32(0x46464952);
setUint32(length - 8);
setUint32(0x45564157);
setUint32(0x20746d66);
setUint32(16);
setUint16(1);
setUint16(numOfChan);
setUint32(abuffer.sampleRate);
setUint32(abuffer.sampleRate * 2 * numOfChan);
setUint16(numOfChan * 2);
setUint16(16);
setUint32(0x61746164);
setUint32(length - pos - 4);
for(i = 0; i < abuffer.numberOfChannels; i++)
channels.push(abuffer.getChannelData(i));
while(pos < length) {
for(i = 0; i < numOfChan; i++) {
sample = Math.max(-1, Math.min(1, channels[i][offset]));
sample = (0.5 + sample < 0 ? sample * 32768 : sample * 32767)|0;
view.setInt16(pos, sample, true);
pos += 2;
}
offset++
}
return new Blob([buffer], {type: "audio/wav"});
function setUint16(data) {
view.setUint16(pos, data, true);
pos += 2;
}
function setUint32(data) {
view.setUint32(pos, data, true);
pos += 4;
}
},