HTML 部分
<canvas id="canvas" class="audioCanves"></canvas>
JS 部分
使用示例(path2 为音频文件的路径):
var initAu = au(path2);  // 实例化可视化音频对象
initAu.audioInit();      // 初始化音频,在可播放时开始渲染
第一种:可视化音频播放器对象
/**
* 创建可视化音频的函数
* @param src 音频路径
*/
var au = function (src) {
var au = new Object();
au.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;
au.mp3 = src;
au.audio = new Audio(src);
au.ctx = new AudioContext();
au.analyser = au.ctx.createAnalyser();
au.audioSrc = au.ctx.createMediaElementSource(au.audio);
au.audioSrc.connect(au.analyser);
au.analyser.connect(au.ctx.destination);
au.frequencyData = new Uint8Array(au.analyser.frequencyBinCount);
au.canvas = document.getElementById('canvas');
au.cwidth = au.canvas.width;
au.cheight = au.canvas.height - 1;
au.meterWidth = 5;
au.gap = 2;
au.speed=10;
au.capHeight = 1;
au.capStyle = '#fff';
au.meterNum = au.canvas.width/ (10 + 2);
au.capYPositionArray = [];
au.ctx = au.canvas.getContext('2d');
au.gradient = au.ctx.createLinearGradient(0, 0, 0, 300);
au.gradient.addColorStop(1, '#0f0');
au.gradient.addColorStop(0.5, '#ff0');
au.gradient.addColorStop(0, '#f00');
au.time;
au.randerFrame = function () {
var array = new Uint8Array(au.analyser.frequencyBinCount);
au.analyser.getByteFrequencyData(array);
var step = Math.round(array.length / au.meterNum); //sample limited data from the total array
au.ctx.clearRect(0, 0, au.cwidth, au.cheight);
for (var i = 0; i < au.meterNum; i++) {
var value = array[i * step];
if (au.capYPositionArray.length < Math.round(au.meterNum)) {
au.capYPositionArray.push(value);
}
au.ctx.fillStyle = au.capStyle;
if (value < au.capYPositionArray[i]) {
au.ctx.fillRect(i * 12, au.cheight-10 - (--au.capYPositionArray[i]), au.meterWidth, au.capHeight);
}
else {
au.ctx.fillRect(i * 12, au.cheight - value, au.meterWidth, au.capHeight);
au.capYPositionArray[i] = value;
}
au.ctx.fillStyle = au.gradient; //set the filllStyle to gradient for a better look
au.ctx.fillRect(i * 12, au.cheight - value + au.capHeight, au.meterWidth, au.cheight+220); //the meter
}
//获取音频播放的进度
var sumTime = parseInt(au.audio.duration);
if(sumTime==NaN){
au.randerFrame();
}
var currentTime = parseInt(au.audio.currentTime);
var Percentage = (currentTime / sumTime) * 100;
var width = 0.6 * Percentage + "vw";
$(".music-range").val(Percentage);
$("#bar_box").css({
"width": width
});
var timeData = timeFormat(currentTime),timeData1=timeFormat(sumTime);
middle_bot.timeData = timeData+" / "+timeData1;
if(sumTime==currentTime){
clearInterval(au.time);
middle_bot.audioP=false;
$("#playDiv").html(" <span style=\"font-size: 3vw;\" class=\"icon iconfont\"></span>");
}
console.log("sumTime : "+sumTime+" currentTime : "+au.audio.duration);
};
au.audioInit=function(){
au.audio.addEventListener("canplay",function () {
au.randerFrame();
})
};
/**
* 跳转播放
*/
au.seek=function(val){
var sumTime = parseInt(au.audio.duration);
console.log("sumTime : "+sumTime);
var currentTime = parseInt((val / 100) * sumTime);
var width = 0.6 * val + "vw";
$("#bar_box").css({
"width": width
});
au.audio.currentTime=currentTime;
var timeData = timeFormat(currentTime),timeData1=timeFormat(sumTime);
middle_bot.timeData = timeData+" / "+timeData1;
};
/**
* 播放开始
*/
au.audioPlay = function () {
console.log("播放");
au.audio.play();
au.time = setInterval(au.randerFrame, au.speed);
};
/**
* 暂停播放
*/
au.audioPause = function () {
console.log("暂停");
clearInterval(au.time);
au.audio.pause();
};
/**
* 重置
*/
au.audioRest = function () {
au.audio.remove();
};
au.audioChange=function (path) {
au.audio.src=path;
};
return au;
};
第二种:简单的可视化处理
var canvas = document.createElement("canvas");
canvas.width = 100;
canvas.height = 180;
var ctx = canvas.getContext("2d");
ctx.fillStyle = "red";
ctx.strokeStyle = "blue";
ctx.lineCap = "round";
var auctx;
window.onload = () => {
document.body.appendChild(canvas);
auctx = new(window.AudioContext || window.webkitAudioContext)();
startAudio();
}
var buffer, src, analyser, buffLen;
var barWidth, dataArray;
function startAudio() {
var url = "1.mp3";
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = function() {
auctx.decodeAudioData(request.response, function(buffer) {
buffer = buffer;
src = auctx.createBufferSource();
src.buffer = buffer;
src.loop = false;
src.connect(auctx.destination);
src.start(0);
analyser = auctx.createAnalyser();
src.connect(analyser);
analyser.connect(auctx.destination);
analyser.fftSize = 256;
buffLen = analyser.frequencyBinCount;
dataArray = new Uint8Array(buffLen);
barWidth = (500 - 2 * buffLen - 4) / buffLen * 2.5;
ctx.lineWidth = barWidth;
draw();
});
}
request.send();
}
function draw() {
ctx.fillRect(0, 0, 500, 180);
analyser.getByteFrequencyData(dataArray);
for (var i = 0; i < buffLen; i++) {
ctx.beginPath();
ctx.moveTo(4 + 2 * i * barWidth + barWidth / 2, 178 - barWidth / 2);
ctx.lineTo(4 + 2 * i * barWidth + barWidth / 2, 178 - dataArray[i] * 0.65 - barWidth / 2);
ctx.stroke();
}
requestAnimationFrame(draw);
}