1 Audio Processing Libraries
Howler.js
Howler.js is an open-source JavaScript library focused on web audio.
It is mainly used to smooth over cross-browser audio compatibility issues, and it provides advanced features such as audio sprites/sound effects, 3D spatial audio, and cross-origin audio loading.
Installation
npm install howler --save
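Before the full player below, a minimal sketch of the core Howl API, including the sprite and spatial calls mentioned above (the file name is a placeholder; the spatial calls need the full howler build, which is the npm default):
import { Howl, Howler } from 'howler';
// One Howl instance can hold several named sprite segments: [start offset (ms), duration (ms)]
const sound = new Howl({
  src: ['sounds.mp3'], // placeholder asset
  sprite: {
    intro: [0, 2000],
    loop: [2000, 5000],
  },
});
// play() returns a sound id that later calls can target
const id = sound.play('intro');
// 3D spatial audio: place this sound 3 units to the listener's right
sound.pos(3, 0, 0, id);
// Global volume across every Howl instance
Howler.volume(0.5);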
Example
A player with play, pause, stop, previous/next track, a progress bar, and volume control.
Vue 3 implementation:
<template>
<div>
<!-- 控制按钮 -->
<button @click="play">播放</button>
<button @click="pause">暂停</button>
<button @click="stop">停止</button>
<button @click="prev">上一首</button>
<button @click="next">下一首</button>
<!-- 进度条 -->
<input type="range" min="0" :max="duration" v-model="currentTime" @input="seek">
<span>{{ formatTime(currentTime) }} / {{ formatTime(duration) }}</span>
<!-- 音量控制 -->
<br>
<label for="volume">音量:</label>
<input id="volume" type="range" min="0" max="1" step="0.01" v-model="volume" @input="setVolume">
</div>
</template>
<script lang="ts">
import { ref, onMounted, watch, onUnmounted } from 'vue';
import { Howl, Howler } from 'howler';
export default {
setup() {
// 定义音频文件路径数组
const audioFiles = ref([
'../src/audio/0.mp3',
'../src/audio/01.wav',
'../src/audio/0.mp3'
]);
// 当前播放曲目索引
let currentTrackIndex = ref(0);
// Howl 实例引用
const sound = ref<Howl | null>(null);
// 播放进度相关变量
const currentTime = ref(0); // 当前时间(秒)
const duration = ref(0); // 总时长(秒)
// 播放状态标志
const isPlaying = ref(false);
// 音量控制
const volume = ref(0.5); // 默认音量设置为50%
// 加载指定索引的音频文件
const loadTrack = (index: number) => {
if (sound.value) {
sound.value.unload(); // 卸载当前音频
}
// 创建新的 Howl 实例
sound.value = new Howl({
src: [audioFiles.value[index]], // 设置音频源
html5: true, // 强制使用 HTML5 <audio> 元素
volume: volume.value, // 设置初始音量
onload: () => {
console.log('Track loaded');
// 在音频加载完成后设置总时长
duration.value = sound.value.duration();
},
onplay: () => {
isPlaying.value = true;
updateProgress(); // 开始更新进度
},
onpause: () => {
isPlaying.value = false;
},
onstop: () => {
isPlaying.value = false;
},
onend: () => {
next(); // 播放结束自动切换到下一首
},
onplayerror: (id, error) => {
console.error('Play error:', error);
},
onloaderror: (id, error) => {
console.error('Load error:', error);
}
});
};
// 播放音频
const play = () => {
if (sound.value && !isPlaying.value) {
sound.value.play();
}
};
// 暂停音频
const pause = () => {
if (sound.value) {
sound.value.pause();
}
};
// 停止音频
const stop = () => {
if (sound.value) {
sound.value.stop();
currentTime.value = 0; // 重置当前时间
}
};
// 切换至上一首
const prev = () => {
currentTrackIndex.value = (currentTrackIndex.value - 1 + audioFiles.value.length) % audioFiles.value.length;
loadTrack(currentTrackIndex.value);
play();
};
// 切换至下一首
const next = () => {
currentTrackIndex.value = (currentTrackIndex.value + 1) % audioFiles.value.length;
loadTrack(currentTrackIndex.value);
play();
};
// 根据用户拖动进度条调整播放位置
const seek = () => {
if (sound.value) {
sound.value.seek(currentTime.value);
}
};
// 根据用户输入调整音量
const setVolume = () => {
if (sound.value) {
sound.value.volume(volume.value);
}
};
// 更新播放进度
const updateProgress = () => {
if (sound.value && isPlaying.value) {
const seekPosition = sound.value.seek();
if (typeof seekPosition === 'number') {
currentTime.value = seekPosition;
}
requestAnimationFrame(updateProgress); // 下一帧继续更新
}
};
// 格式化时间显示
const formatTime = (seconds: number) => {
const date = new Date(seconds * 1000);
return date.toISOString().substr(11, 8);
};
// 组件挂载时加载第一首歌曲
onMounted(() => {
loadTrack(currentTrackIndex.value);
});
// 组件卸载时清理 Howl 实例
onUnmounted(() => {
if (sound.value) {
sound.value.unload();
}
});
// 监视 currentTime 确保其不会超过 duration
watch(currentTime, (newVal) => {
if (newVal > duration.value) {
currentTime.value = duration.value;
}
});
// 返回所有需要暴露给模板的方法和变量
return {
play,
pause,
stop,
prev,
next,
currentTime,
duration,
seek,
formatTime,
volume,
setVolume
};
}
};
</script>
<style scoped>
/* Add your styles here */
</style>
If you need to restyle the range sliders, do it in CSS — see the linked article:
Custom audio player styling
2 Waveform Processing Libraries
Wavesurfer.js
wavesurfer.js is a JavaScript library for visualizing audio waveforms in the browser in real time.
It supports multiple audio formats, including MP3, WAV, AAC, and Ogg,
and any supported file can be loaded by calling the corresponding load method.
It supports play, pause, seeking, zooming, region selection, and cutting,
and lets you customize the waveform's appearance and style, including colors, shadow effects, the progress cursor, and markers.
npm install wavesurfer.js --save
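The zooming and region selection mentioned above are not used in the examples that follow; a minimal sketch, assuming wavesurfer.js v7 and its bundled Regions plugin (the audio URL is a placeholder served from public/):
import WaveSurfer from 'wavesurfer.js';
import RegionsPlugin from 'wavesurfer.js/dist/plugins/regions.esm.js';
const ws = WaveSurfer.create({
  container: '#waveform',
  url: '/01.wav',          // placeholder audio file
  waveColor: 'violet',
  progressColor: 'blue',
});
const regions = ws.registerPlugin(RegionsPlugin.create());
ws.on('ready', () => {
  ws.zoom(50);             // horizontal zoom, in pixels per second
  // mark a draggable, resizable region from 1s to 3s
  regions.addRegion({ start: 1, end: 3, color: 'rgba(0, 123, 255, 0.2)', drag: true, resize: true });
});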
Basic waveform display
Render the waveform on init; clicking the button starts playback (the waveform progresses), clicking again pauses it.
<template>
<div>
<div id="waveform" style="width:800px;height:120px" ></div>
<el-button type="primary" @click="playPause" :loading="isLoading">
{{ isLoading ? 'Loading...' : isPlaying ? '暂停' : '启动' }}
</el-button>
<p v-if="error">{{ error }}</p>
</div>
</template>
<script setup lang="ts">
import { ref, onMounted, onBeforeUnmount } from 'vue';
import WaveSurfer from 'wavesurfer.js';
let wavesurfer: WaveSurfer | null = null;
const isLoading = ref(false);
const isPlaying = ref(false);
const error = ref<string | null>(null);
function initWaveSurfer() {
isLoading.value = true;
error.value = null;
wavesurfer = WaveSurfer.create({
container: '#waveform', // 绑定容器
url: '../public/01.wav', // 音频地址,使用的是 Vite,将文件放到根目录 public 下才有用!!!
autoCenter: false, // 滚动时不自动将播放光标居中(不是自动播放开关)
// waveColor: '#4F4A85',
// progressColor: '#383351',
waveColor: 'violet',
progressColor: 'blue',
backend: 'MediaElement', // 使用 MediaElement 后端来确保兼容性
responsive: true,
});
wavesurfer.on('ready', () => {
isLoading.value = false;
});
wavesurfer.on('error', (err) => {
isLoading.value = false;
error.value = `Failed to load audio: ${err.message}`;
});
wavesurfer.on('play', () => {
isPlaying.value = true;
});
wavesurfer.on('pause', () => {
isPlaying.value = false;
});
}
// 播放暂停方法
const playPause = () => {
if (wavesurfer) {
wavesurfer.playPause();
}
};
onMounted(() => {
initWaveSurfer();
});
onBeforeUnmount(() => {
if (wavesurfer) {
wavesurfer.destroy();
wavesurfer = null;
}
});
</script>
<style scoped>
/* Add any styles you need here */
</style>
Bar-style waveform display
<template>
<div>
<div id="waveform" style="width:800px;height:120px"></div>
<el-button type="primary" @click="playPause" :loading="isLoading">
{{ isLoading ? 'Loading...' : isPlaying ? 'Pause' : 'Play' }}
</el-button>
<p v-if="error">{{ error }}</p>
</div>
</template>
<script setup lang="ts">
import { ref, onMounted, onBeforeUnmount } from 'vue';
import WaveSurfer from 'wavesurfer.js';
let wavesurfer: WaveSurfer | null = null;
const isLoading = ref(false);
const isPlaying = ref(false);
const error = ref<string | null>(null);
function initWaveSurfer() {
isLoading.value = true;
error.value = null;
// 初始化 Wavesurfer 并设置 renderer 为 bar
wavesurfer = WaveSurfer.create({
container: '#waveform', // 绑定容器
url: '/01.wav', // 音频地址,使用的是 Vite,将文件放到根目录 public 下才有用!!!
backend: 'MediaElement', // 使用 MediaElement 后端来确保兼容性
waveColor: 'violet',
progressColor: 'blue',
cursorWidth: 1,
responsive: true,
height: 120,
barWidth: 4, // 柱子宽度
barGap: 2, // 柱子之间的间距
normalize: true, // 归一化音频数据
hideScrollbar: true,
// 柱状渲染由上面的 barWidth / barGap 决定,无需额外的 render 选项
});
wavesurfer.on('ready', () => {
isLoading.value = false;
});
wavesurfer.on('error', (err) => {
isLoading.value = false;
error.value = `Failed to load audio: ${err.message}`;
});
wavesurfer.on('play', () => {
isPlaying.value = true;
});
wavesurfer.on('pause', () => {
isPlaying.value = false;
});
}
// 播放暂停方法
const playPause = () => {
if (wavesurfer) {
wavesurfer.playPause();
}
};
onMounted(() => {
initWaveSurfer();
});
onBeforeUnmount(() => {
if (wavesurfer) {
wavesurfer.destroy();
wavesurfer = null;
}
});
</script>
<style scoped>
/* Add any styles you need here */
#waveform {
width: 800px;
height: 120px;
}
</style>
Initialize and start the waveform on click
<template>
<div>
<div id="waveform" style="width:800px;height:200px"></div>
<el-button type="primary" @click="initializeAndPlay" :loading="isLoading">
{{ isLoading ? 'Loading...' : isPlaying ? 'Pause' : 'Play' }}
</el-button>
<p v-if="error">{{ error }}</p>
</div>
</template>
<script setup lang="ts">
import { ref, onMounted, onBeforeUnmount } from 'vue';
import WaveSurfer from 'wavesurfer.js';
let wavesurfer: WaveSurfer | null = null;
const isLoading = ref(false);
const isPlaying = ref(false);
const error = ref<string | null>(null);
function initWaveSurfer() {
// 初始化 Wavesurfer
wavesurfer = WaveSurfer.create({
container: '#waveform', // 绑定容器
url: '/01.wav', // 音频地址,使用的是 Vite,将文件放到根目录 public 下才有用!!!
backend: 'MediaElement', // 使用 MediaElement 后端来确保兼容性
waveColor: 'violet',
progressColor: 'blue',
cursorWidth: 1,
responsive: true,
height: 100, // 增加高度以适应纵向柱状图
normalize: true, // 归一化音频数据
hideScrollbar: true,
barWidth: 4, // 柱子宽度
barGap: 2, // 柱子之间的间距
// 柱状渲染由上面的 barWidth / barGap 决定,无需额外的 render 选项
});
wavesurfer.on('ready', async () => {
isLoading.value = false;
// 尝试恢复 AudioContext
const audioContext = wavesurfer.backend.getAudioContext();
if (audioContext && audioContext.state === 'suspended') {
try {
await audioContext.resume(); // 等待恢复完成
} catch (err) {
console.error('Failed to resume AudioContext:', err);
}
}
drawVerticalWaveform();
});
wavesurfer.on('error', (err) => {
isLoading.value = false;
error.value = `Failed to load audio: ${err.message}`;
});
wavesurfer.on('play', () => {
isPlaying.value = true;
});
wavesurfer.on('pause', () => {
isPlaying.value = false;
});
}
function drawVerticalWaveform() {
if (!wavesurfer || !wavesurfer.backend) return;
const peaks = wavesurfer.backend.getPeaks(800); // 获取波峰数据
const canvasHeight = 400; // 与 #waveform 的高度一致
const canvasWidth = 800;
const barHeight = canvasHeight / peaks.length; // 每个柱子的高度
const spacing = 1; // 柱子之间的间距
const canvas = document.createElement('canvas');
canvas.width = barHeight;
canvas.height = canvasWidth;
canvas.style.position = 'absolute';
canvas.style.left = '0';
canvas.style.top = '0';
canvas.style.transform = 'rotate(-90deg)'; // 旋转 canvas 以实现纵向柱状图
canvas.style.transformOrigin = 'top left';
document.getElementById('waveform')?.appendChild(canvas);
const ctx = canvas.getContext('2d');
if (ctx) {
ctx.clearRect(0, 0, canvas.width, canvas.height);
for (let i = 0; i < peaks.length; i++) {
const peak = peaks[i];
const barWidth = Math.abs(peak) * canvasWidth / 2;
const x = canvasHeight - (i + 1) * barHeight + spacing;
const y = canvasWidth / 2 - barWidth / 2;
ctx.fillStyle = `rgb(${Math.abs(peak * 255)}, 50, 50)`; // 根据峰值调整颜色
ctx.fillRect(x, y, barHeight - spacing * 2, barWidth); // 绘制柱子
}
}
}
// 播放暂停方法
const initializeAndPlay = async () => {
if (!wavesurfer) {
initWaveSurfer();
}
if (wavesurfer) {
wavesurfer.playPause();
}
};
onBeforeUnmount(() => {
if (wavesurfer) {
wavesurfer.destroy();
wavesurfer = null;
}
});
</script>
<style scoped>
/* Add any styles you need here */
#waveform {
position: relative;
width: 800px;
height: 400px;
}
</style>
Display the waveform of an uploaded file
<template>
<div id="app">
<input type="file" @change="onFileChange" />
<button @click="playAudio" class="audio-button">播放</button>
<button @click="pauseAudio" class="audio-button">暂停</button>
<div id="waveform"></div>
</div>
</template>
<script>
import Wavesurfer from "wavesurfer.js";
export default {
data() {
return {
file: null,
waveform: null,
wavesurfer: null,
isPlaying: false,
};
},
methods: {
onFileChange(e) {
this.file = e.target.files[0];
this.loadWaveform();
},
loadWaveform() {
if (this.wavesurfer) {
this.wavesurfer.destroy();
}
const reader = new FileReader();
reader.onload = (e) => {
this.wavesurfer = Wavesurfer.create({
container: document.getElementById("waveform"),
height: 100,
progressColor: "#d384fe", // 这将设置进度条的颜色为播放按钮的颜色
cursorColor: "#d0bdda",
hideScrollbar: true,
responsive: true,
waveColor: "#bfb0fb", // 这将设置音量图的颜色为按钮的颜色
});
this.wavesurfer.load(e.target.result);
};
reader.readAsDataURL(this.file);
},
playAudio() {
if (!this.isPlaying) {
this.wavesurfer.play();
this.isPlaying = true;
}
},
pauseAudio() {
if (this.isPlaying) {
this.wavesurfer.pause();
this.isPlaying = false;
}
},
},
};
</script>
<style scoped>
.audio-button {
font-size: 30px;
border: 1px solid red;
}
</style>
Recording with a live waveform display
wavesurfer.js must be served over HTTPS, otherwise the browser will not prompt for microphone permission.
wavesurfer.js + the Record plugin (bundled inside wavesurfer.js)
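Before wiring up the Record plugin it can help to request microphone access explicitly, so permission problems surface early; a small sketch using the standard getUserMedia API, independent of wavesurfer:
// Ask for the microphone up front; on plain http (other than localhost) this promise rejects.
async function ensureMicPermission(): Promise<boolean> {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    // Release the test stream right away; the Record plugin opens its own.
    stream.getTracks().forEach((t) => t.stop());
    return true;
  } catch (err) {
    console.error('Microphone permission denied or unavailable:', err);
    return false;
  }
}
The recording component itself: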
<template>
<div>
<div ref="waveform"></div>
<button @click="startRecording">开始录音</button>
<button @click="stopRecording">停止录音</button>
</div>
</template>
<script>
import WaveSurfer from 'wavesurfer.js';
import RecordPlugin from 'wavesurfer.js/dist/plugins/record.esm';
//
export default {
data() {
return {
wavesurfer: null,
record: null,
recordedUrl: '',
recordedBlobType: '',
};
},
mounted() {
this.init();
},
beforeDestroy() {
if (this.wavesurfer) {
this.wavesurfer.destroy();
}
},
methods: {
async init() {
this.wavesurfer = WaveSurfer.create({
container: this.$refs.waveform,
waveColor: '#FCB28F', // 设置波形颜色
progressColor: '#FCB28F', // 设置进度条颜色
height: 150, // 设置波形图高度
barWidth: 10, // 设置条形宽度
barRadius: 2, // 设置条形圆角
cursorWidth: 0, // 设置光标宽度
});
// 设置录音选项
this.record = this.wavesurfer.registerPlugin(
RecordPlugin.create({
scrollingWaveform: false,
})
);
this.record.on('record-end', (blob) => {
this.recordedUrl = URL.createObjectURL(blob);
this.recordedBlobType = blob.type.split(';')[0].split('/')[1] || 'webm';
});
this.record.on('record-progress', (time) => {
console.log('更新进度条', time);
});
},
// 开始录音
startRecording() {
this.record.startRecording().then(() => {
console.log('开始录音');
});
},
// 停止结束录音
stopRecording() {
this.record.stopRecording();
},
},
};
</script>
<style>
.waveform {
width: 100%;
height: 128px;
}
</style>
audio+wavesurfer.js
<template>
<div id="waveform" ref="waveform"></div>
<audio
:src="audioSrc1"
controls
class="audio4"
@play="handlePlay"
@pause="handlePause"
@seeking="handleSeeking"
controlsList="noplaybackrate nodownload"
></audio>
</template>
<script setup>
import { ref, onMounted, onBeforeUnmount } from 'vue';
import WaveSurfer from 'wavesurfer.js';
const waveform = ref(null);
const audioSrc1 = ref('../20250109.wav'); // 确保这里是你音频文件的真实URL
let wavesurfer = null;
onMounted(() => {
wavesurfer = WaveSurfer.create({
container: waveform.value,// 需要的容器盒子
scrollParent: false, // 是否出现滚动条
cursorColor: '#fff',// 播放进行时线条颜色
cursorWidth: 2, // 播放进行时线条宽度
waveColor: '#ccc', // 未播放的波纹颜色
progressColor: '#50f0a6', // 已播放的波纹颜色
backgroundColor: "#000",// 背景色
height: 130, //频谱的高度
audioRate: 1, // 播放速率
});
wavesurfer.load(audioSrc1.value); // 加载音频文件
});
// 播放音频
const handlePlay = () => {
const audioElement = document.querySelector('.audio4');
audioElement.muted = true;
wavesurfer.play();
};
// 暂停
const handlePause = () => {
wavesurfer.pause();
};
// 拖动进度条时触发
const handleSeeking = () => {
const audioElement = document.querySelector('.audio4');
//使wavesurfer跳到进度条拖动的位置(audio当前时间 - wavesurfer当前时间 = 需要跳转的时间)
wavesurfer.skip(audioElement.currentTime - wavesurfer.getCurrentTime());
};
// 关闭
const handleClose = () => {
const audioElement = document.querySelector('.audio4');
audioElement.currentTime = 0;
audioElement.pause();
wavesurfer.destroy();//停止播放并回到起始点
};
onBeforeUnmount(() => {
if (wavesurfer) {
wavesurfer.destroy();
}
});
</script>
<style scoped>
#waveform {
width: 100%;
background: #1e221f;
pointer-events: none;
}
</style>
peaks.js
npm install --save peaks.js
npm install --save konva
npm install --save waveform-data
In the init options, you only need to point mediaElement at the right DOM element to render a waveform for either an audio or a video element.
<template>
<div>
<!-- <video
ref="video"
id="video"
width="100%"
height="100%"
crossOrigin="Anonymous"
src=""
/> -->
<div id="peak-container">
<div id="zoomview-container" ref="zoomview"></div>
<div id="overview-container" ref="overview"></div>
</div>
<div id="demo-controls">
<audio ref="audio" id="audio" controls="controls">
<source src="../public/01.wav" type="audio/mpeg" />
</audio>
</div>
<div>
<el-button @click="delel">销毁</el-button>
<el-button @click="play">播放/暂停</el-button>
缩略图展示的时间区间
<el-button @click="big" id="big" ref="big">收起</el-button>
<el-button @click="small" id="small" ref="small">展开</el-button>
<el-button id="seek" ref="resetPaly" @click="resetPaly">重置</el-button>
波幅调节
<el-slider
vertical
height="200px"
@input="input"
v-model="scaleValue"
id="amplitude-scale"
:format-tooltip="formatTooltip"
:step="10"
></el-slider>
<el-button @click="zoomable">显示/隐藏可缩放波形图</el-button>
</div>
</div>
</template>
<script>
import Peaks from "peaks.js";
const amplitudeScales = {
0: 0.0,
1: 0.1,
2: 0.25,
3: 0.5,
4: 0.75,
5: 1.0,
6: 1.5,
7: 2.0,
8: 2.5,
9: 3.0,
10: 3.5,
};
export default {
data() {
return {
playPause: true, //true 播放 false 暂停
peaks: "",
// 只能将其隐藏,dom元素需要保留
dispaly: false,
scaleValue: 50,
};
},
created() {
this.$nextTick(() => {
const options = {
zoomview: {
container: this.$refs.zoomview,
},
overview: {
container: this.$refs.overview,
},
// 只需要修改dom就可以展示音频或视频波形图
mediaElement: this.$refs.audio,
webAudio: {
audioContext: new AudioContext(),
},
emitCueEvents: true,
// //显示轴标签
showAxisLabels: true,
};
Peaks.init(options, (err, peaks) => {
if (err) {
console.log("messge " + err);
return;
}
this.peaks = peaks;
});
});
},
methods: {
delel() {
this.peaks.destroy();
//必须将其src清空或audio销毁,不然音频任可以播放
this.$refs.audio.src = "";
},
play() {
if (this.playPause) {
this.peaks.player.play();
this.playPause = false;
} else {
this.peaks.player.pause();
this.playPause = true;
}
},
big() {
console.log(this.peaks);
this.peaks.zoom.zoomIn();
console.log("收起");
},
small() {
this.peaks.zoom.zoomOut();
console.log("展开");
},
resetPaly() {
this.peaks.player.seek(0.0);
console.log("重置");
// }
},
input(value) {
let scale = amplitudeScales[value / 10];
// 隐藏缩略图时候进行处理
if (this.peaks.views.getView("zoomview")) {
this.peaks.views.getView("zoomview").setAmplitudeScale(scale);
}
this.peaks.views.getView("overview").setAmplitudeScale(scale);
},
formatTooltip(val) {
return val / 10;
},
zoomable() {
let container = document.getElementById("zoomview-container");
let zoomview = this.peaks.views.getView("zoomview");
if (zoomview) {
this.peaks.views.destroyZoomview();
container.style.display = "none";
} else {
container.style.display = "block";
this.peaks.views.createZoomview(container);
}
},
},
};
</script>
<style>
#zoomview-container {
height: 300px;
}
#overview-container {
height: 300px;
}
</style>
canvas
Click to show an audio waveform visualization
Vue 3 + TS
<template>
<div class="audioPage" ref="audioPageRef" @click="palyMusic()">
<canvas ref="canvasRef" width="100" height="100"></canvas>
<audio ref="audioRef">
<source src="../public/01.wav" />
</audio>
</div>
</template>
<script setup lang="ts">
import { onMounted, ref } from "vue";
const audioRef = ref();
const canvasRef = ref();
const audioPageRef = ref();
//需要可视化的数据
let audioArr: Uint8Array = new Uint8Array();
//我们需要一个音乐播放器和一个canvas进行显示
const radius = 20; // 圆的半径
let analyser: any = null;
//初始化
let isInit = false;
onMounted(() => {
drawGradientCircle(); // 绘制圆形
// 当音乐播放
audioRef.value.onplay = () => {
if (!isInit) {
isInit = true;
}
const audioContext = new AudioContext(); // 创建音频上下文
const audioSrc = audioContext.createMediaElementSource(audioRef.value); // 创建音频源
analyser = audioContext.createAnalyser(); // 创建分析器
analyser.fftSize = 64; // 设置傅里叶变换的大小,影响线条密度
audioArr = new Uint8Array(analyser.frequencyBinCount); // 创建一个无符号字节数组存储频率数据,该API参考ES6文档
audioSrc.connect(analyser); // 连接音频源和分析器
analyser.connect(audioContext.destination); // 连接分析器和音频目的地
//动画开始
animate();
};
});
// 绘制圆形
const drawGradientCircle = () => {
const ctx = canvasRef.value.getContext("2d");
const centerX = canvasRef.value.width / 2;
const centerY = canvasRef.value.height / 2;
ctx.beginPath();
// 创建一个覆盖圆形区域的线性渐变
const grd = ctx.createLinearGradient(
centerX - radius,
centerY - radius,
centerX + radius,
centerY + radius
);
grd.addColorStop("0", "purple");
grd.addColorStop("0.3", "magenta");
grd.addColorStop("0.5", "blue");
grd.addColorStop("0.6", "green");
grd.addColorStop("0.8", "yellow");
grd.addColorStop(1, "red");
ctx.strokeStyle = grd;
ctx.arc(centerX, centerY, radius - 2, 0, Math.PI * 2); // 绘制一个完整的圆
ctx.stroke(); // 画圆复制代码
};
//绘制线条
const drawLinesFromCircle = () => {
const ctx = canvasRef.value.getContext("2d");
const centerX = canvasRef.value.width / 2;
const centerY = canvasRef.value.height / 2;
ctx.lineWidth = 2;
//使用音频的频率数据绘制线条
//为了美观,我们绘制两条线,一条是频率数据,另一条是对称的
//也可以去使用其它的方式绘制线条
audioArr.forEach((value, index) => {
const baseAngle = (index / audioArr.length) * Math.PI * 2; // 基础角度
const angle1 = baseAngle; // 第一条线的角度
const angle2 = baseAngle + Math.PI; // 对称线的角度,相差π(180度)
// 绘制第一条线
{
const endX1 = centerX + radius * Math.cos(angle1);
const endY1 = centerY + radius * Math.sin(angle1);
const startX1 = centerX + (radius + value * 0.1) * Math.cos(angle1); // 使用value控制长度
const startY1 = centerY + (radius + value * 0.1) * Math.sin(angle1);
ctx.beginPath();
ctx.moveTo(startX1, startY1);
ctx.lineTo(endX1, endY1);
ctx.strokeStyle = `hsl(${index * 3.6}, 100%, 50%)`;
ctx.stroke();
}
// 绘制对称的第二条线
{
const endX2 = centerX + radius * Math.cos(angle2);
const endY2 = centerY + radius * Math.sin(angle2);
const startX2 = centerX + (radius + value * 0.1) * Math.cos(angle2);
const startY2 = centerY + (radius + value * 0.1) * Math.sin(angle2);
ctx.beginPath();
ctx.moveTo(startX2, startY2);
ctx.lineTo(endX2, endY2);
ctx.strokeStyle = `hsl(${
(index + audioArr.length / 2) * 3.6
}, 100%, 50%)`; // 调整颜色以保持对称性且有所区别
ctx.stroke();
}
});
};
//播放动画
const animate = () => {
const ctx = canvasRef.value.getContext("2d");
ctx.clearRect(0, 0, canvasRef.value.width, canvasRef.value.height); // 清除画布
if (!isInit) return;
analyser.getByteFrequencyData(audioArr); // 获取频率数据
drawGradientCircle(); // 绘制圆形
drawLinesFromCircle(); // 绘制伸展的线条
requestAnimationFrame(animate); // 重复绘制以创建动画效果
};
const palyMusic = () => {
if (audioRef.value.paused) {
audioRef.value.play();
} else {
audioRef.value.pause();
}
};
</script>
<style scoped lang="scss">
.audioPage {
width: 100%;
height: 100%;
display: flex;
justify-content: center;
align-items: center;
cursor: pointer;
}
</style>
Vue + canvas audio waveform visualization
Local audio files must be placed in the public folder,
which sits at the same level as the src folder.
Vue 2 + canvas
<template>
<div class="subGuide">
<canvas id="canvas"></canvas>
<br>
<audio id="audio" src="../public/01.wav" controls></audio>
<!-- src 对应你的音频文件;使用本地文件需创建 public 文件夹并将文件放在其中引入 -->
</div>
</template>
<script>
export default {
name: 'subGuide',
data() {
return {
}
},
mounted() {
const audioEle = document.querySelector('audio')
const cvs = document.querySelector('canvas')
const ctx = cvs.getContext('2d')
function initCvs() {
cvs.width = (window.innerWidth / 2) * devicePixelRatio
cvs.height = (window.innerHeight / 3) * devicePixelRatio
}
initCvs()
let isInit = false
let dateArray = null
let analyser = null
audioEle.addEventListener('play', function (e) {
if (isInit) return
const audCtx = new AudioContext()
const source = audCtx.createMediaElementSource(audioEle)
analyser = audCtx.createAnalyser()
analyser.fftSize = 512
dateArray = new Uint8Array(256)
source.connect(analyser)
analyser.connect(audCtx.destination)
isInit = true
})
function draw() {
requestAnimationFrame(draw)
const { width, height } = cvs
ctx.clearRect(0, 0, width, height)
if (!isInit) return
analyser.getByteFrequencyData(dateArray)
const len = dateArray.length / 2.5
ctx.fillStyle = '#266fff'
const barWidth = width / len / 2
for (let i = 0; i < len; i++) {
const data = dateArray[i]
const barHeight = (data / 255) * height
const x1 = i * barWidth + width / 2
const x2 = width / 2 - (i + 1) * barWidth
const y = height - barHeight
ctx.fillRect(x1, y, barWidth - 2, barHeight)
ctx.fillRect(x2, y, barWidth - 2, barHeight)
}
}
draw()
},
methods: {
}
}
</script>
<style lang="scss" scoped></style>
Vue 3 + canvas
<template>
<div class="audio">
<canvas ref="canvasRef"></canvas>
<audio ref="aduioRef" controls @play="playAudio">
<!-- <source src="../src/audio/0.mp3" type="audio/mp3" /> -->
<source src="../public/01.wav" type="audio/ogg" />
<p>你的浏览器不支持该音频</p>
</audio>
</div>
</template>
<script setup lang="ts">
import { ref, onMounted, Ref } from 'vue';
const canvasRef: Ref<HTMLCanvasElement> = ref(document.getElementsByTagName('canvas')[0]);
const aduioRef = ref<HTMLAudioElement | null>(null);
const ctx = ref<any>(null);
const audioContext = ref<any>(null);
const source = ref<any>(null);
const analyser = ref<any>(null);
const dataArray = ref<Uint8Array>(new Uint8Array());
onMounted(() => {
ctx.value = canvasRef.value && canvasRef.value.getContext('2d');
initCanvas();
});
// 初始化canvas
const initCanvas = () => {
canvasRef.value.width = 600;
canvasRef.value.height = 400;
}
const playAudio = () => {
// 音频源->音频节点->分析节点(输出到canvas里面)->输出节点(播放)
audioContext.value = new AudioContext();
source.value = audioContext.value.createMediaElementSource(aduioRef.value);
analyser.value = new AnalyserNode(audioContext.value);
source.value.connect(analyser.value);
analyser.value.connect(audioContext.value.destination);
analyser.value.fftSize = 512;
const bufferLength = analyser.value.fftSize;
dataArray.value = new Uint8Array(bufferLength / 2);
draw();
}
const draw = () => {
requestAnimationFrame(draw);
ctx.value.clearRect(0, 0, canvasRef.value.width, canvasRef.value.height);
analyser.value.getByteFrequencyData(dataArray.value);
const len=dataArray.value.length/3;
const barW = canvasRef.value.width / len;
for (var i = 0; i < len; i++) {
const x = i * barW;
const barH = dataArray.value[i] / 255 * canvasRef.value.height;
const y = canvasRef.value.height - barH;
ctx.value.fillStyle = 'rgb(200, 200, 200)';
ctx.value.fillRect(x, y, barW-2, barH);
}
}
</script>
<style scoped>
.audio {
height: 100%;
width: 100%;
}
canvas {
border: 1px solid black;
}
</style>
Vue 3 + canvas, with dynamic bar colors
<template>
<div>
<audio ref="audioElement" :src="audioSrc" controls></audio>
<canvas ref="canvasElement" width="800" height="400" style="background-color: antiquewhite;"></canvas>
</div>
</template>
<script setup lang="ts">
import { onMounted, ref, onUnmounted } from "vue";
import audioSrc from "../audio/0.mp3";//对应音频文件
const audioElement = ref<HTMLAudioElement | null>(null);
const canvasElement = ref<HTMLCanvasElement | null>(null);
onMounted(() => {
const audio = audioElement.value;
if (!audio) {
console.error("Audio element not found");
return;
}
const audioCtx = new (window.AudioContext || (window as any).webkitAudioContext)();
if (!audioCtx) {
console.error("Audio context not supported");
return;
}
const source = audioCtx.createMediaElementSource(audio);
const analyser = audioCtx.createAnalyser();
analyser.fftSize = 2048; // 设置FFT大小,影响频率分辨率
source.connect(analyser);
analyser.connect(audioCtx.destination);
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
const canvas = canvasElement.value;
if (!canvas) {
console.error("Canvas element not found");
return;
}
const canvasCtx = canvas.getContext("2d");
if (!canvasCtx) {
console.error("Canvas context not found");
return;
}
function draw() {
requestAnimationFrame(draw);
analyser.getByteFrequencyData(dataArray); // 使用频率数据而不是时间域数据
canvasCtx.clearRect(0, 0, canvas.width, canvas.height); // 清除画布
const barWidth = (canvas.width / bufferLength) * 2.5; // 设置条宽度
let barHeight;
let x = 0;
for (let i = 0; i < bufferLength; i++) {
barHeight = dataArray[i];
const color = `rgb(${barHeight + 50}, 50, ${200 - barHeight})`; // 动态颜色
canvasCtx.fillStyle = color;
canvasCtx.fillRect(x, canvas.height - barHeight / 2, barWidth, barHeight / 2);
x += barWidth + 1;
}
}
audio.addEventListener("play", draw); // 当音频播放时开始绘制
// 清理资源
onUnmounted(() => {
audio.removeEventListener("play", draw);
source.disconnect(analyser);
analyser.disconnect(audioCtx.destination);
audioCtx.close().then(() => console.log("Audio context closed"));
});
});
</script>
<style scoped>
canvas {
border: 1px solid #000; /* 添加边框 */
margin-top: 10px; /* 添加顶部外边距 */
}
</style>
siriwave.js
SiriWave is an open-source JavaScript library that faithfully reproduces the animated waveform Apple's Siri shows on screen.
It implements two wave styles: the classic iOS look and the fluorescent style introduced in iOS 9.
By adjusting speed, amplitude, frequency, and other parameters you can tailor the wave to different scenarios;
the project also supports custom colors, curve definitions, and randomization ranges.
The Siri waveform effect from iOS
npm install siriwave --save
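A minimal sketch of the SiriWave API outside Vue — the constructor options plus the runtime setters (the container id is a placeholder):
import SiriWave from 'siriwave';
const wave = new SiriWave({
  container: document.getElementById('siri-wave')!, // placeholder element
  style: 'ios9',   // 'ios' for the classic look, 'ios9' for the fluorescent one
  amplitude: 1,
  speed: 0.1,
  autostart: true,
});
// Both can be changed at runtime, e.g. driven by playback volume or a mic level.
wave.setAmplitude(2);
wave.setSpeed(0.05);
The Vue component below drives SiriWave from an audio element: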
<template>
<div>
<!-- 音频标签 -->
<audio ref="audio" controls @play="onPlay" @pause="onPause" @timeupdate="onTimeUpdate">
<source src="../audio/0.mp3" type="audio/mpeg">
Your browser does not support the audio element.
</audio>
<!-- 画布容器 -->
<div ref="siriwaveContainer" class="siriwave-container"></div>
<!-- 控制按钮 -->
<button @click="togglePlay">{{ isPlaying ? '暂停' : '播放' }}</button>
</div>
</template>
<script>
import SiriWave from 'siriwave';
export default {
name: 'AudioWithSiriWave',
data() {
return {
siriWave: null,
isPlaying: false, // 播放状态标志
};
},
mounted() {
this.initSiriWave();
},
methods: {
initSiriWave() {
this.siriWave = new SiriWave({
container: this.$refs.siriwaveContainer,
width: window.innerWidth,
height: 200,
style: 'ios', // 'ios' 或者 'ios9'
amplitude: 1,
speed: 0.05,
frequency: 3,
color: '#ffffff',
cover: true,
autostart: false,
});
},
onPlay() {
this.isPlaying = true;
this.siriWave.start();
},
onPause() {
this.isPlaying = false;
this.siriWave.stop();
},
onTimeUpdate() {
// 可以在这里根据音频的播放进度调整 SiriWave 的参数,例如振幅
const audioElement = this.$refs.audio;
const volume = audioElement.volume; // 获取当前音量
this.siriWave.setAmplitude(volume); // 设置振幅
},
togglePlay() {
const audioElement = this.$refs.audio;
if (this.isPlaying) {
audioElement.pause();
} else {
audioElement.play();
}
},
},
beforeDestroy() {
if (this.siriWave) this.siriWave.stop();
},
};
</script>
<style scoped>
.siriwave-container {
width: 30%;
height: 200px;
background-color: #000;
}
button {
margin-top: 10px;
}
</style>
3 Video Processing Libraries
video.js
video.js is a player for straightforward video playback; it supports HTML5 video (with legacy Flash fallback).
npm install video.js --save
<template>
<div class="video_text">
<video
id="myVideo"
class="video-js">
<source
:src="videoUrl"
type="video/mp4"
>
</video>
</div>
</template>
<script>
import Video from 'video.js'
import 'video.js/dist/video-js.css'
export default {
name: "Video",
data() {
return {
videoUrl:'../src/audio/11.mp4',//视频地址
};
},
mounted() {
this.initVideo(); //初始化视频播放器
},
methods: {
initVideo() {
//初始化视频方法
let myPlayer = Video('myVideo', {
//是否显示控制栏
controls: true,
//是否自动播放,muted:静音播放
autoplay: false,
//是否静音播放
muted:false,
//是否流体自适应容器宽高
fluid:true,
//设置视频播放器的显示宽度(以像素为单位)
width: 800,
//设置视频播放器的显示高度(以像素为单位)
height: 400
});
}
}
};
</script>
<style lang="less">
.video_text{
width: 500px;
margin: 0 auto;
}
video:focus{
outline: 0; //去掉选中蓝框
}
.video-js .vjs-big-play-button{
/*对播放按钮的样式进行设置*/
width: 100px;
height: 60px;
position: absolute;
top: 50%;
left: 50%;
transform: translate3d(-50%,-50%,0);
}
</style>
vue-video-player
vue-video-player is a Vue component wrapper around video.js
that supports multiple streaming protocols (such as HLS and DASH).
First install both packages:
npm install video.js vue-video-player
To load a local video, place the file under the public folder and reference it from there.
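For one of the streaming protocols mentioned above (HLS, for instance) only the sources entry changes; a sketch with a placeholder URL — video.js 7+ plays HLS natively through its bundled VHS engine, so no extra plugin is strictly required:
const hlsPlayerOptions = {
  autoplay: false,
  controls: true,
  sources: [
    {
      type: 'application/x-mpegURL',              // HLS MIME type
      src: 'https://example.com/live/index.m3u8', // placeholder stream URL
    },
  ],
};
Pass an object like this to the component's :options prop, exactly like playerOptions in the MP4-based example below.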
<template>
<div>
<video-player ref="videoPlayerRef" :options="playerOptions" @player-ready="onPlayerReady"></video-player>
</div>
</template>
<script>
import 'video.js/dist/video-js.css'
import { videoPlayer } from 'vue-video-player'
export default {
components: { videoPlayer }, // 在组件内注册,也可在入口文件全局 Vue.use(VueVideoPlayer)
data() {
return {
playerOptions: {
autoplay: false, // 是否自动播放
controls: true, // 是否显示控制条
preload: 'auto', // 预加载模式
loop: false, // 是否循环播放
width: 640, // 视频宽度
height: 360, // 视频高度
fluid: false, // 是否自适应宽度
sources: [
{
type: 'video/mp4',
src: '../public/11.mp4' // 本地加载视频放在public里面
}
]
}
}
},
methods: {
onPlayerReady(player) {
// 播放器准备好后,在这里进行操作
player.on('play', () => {
console.log('视频开始播放');
});
player.on('pause', () => {
console.log('视频暂停播放');
});
player.on('ended', () => {
console.log('视频播放结束');
});
}}
}
</script>
<style scoped>
/* 可以在这里添加自定义样式 */
</style>
DPlayer
DPlayer provides both video playback and danmaku (bullet-comment) support.
npm i dplayer -S // the video player itself
npm i hls.js -S // plugin for playing HLS streams
npm i flv.js -S // plugin for playing FLV streams
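The customType hook that appears commented out in the component below is how hls.js gets wired in; a compact sketch with a placeholder stream URL:
import DPlayer from 'dplayer';
import Hls from 'hls.js';
const dp = new DPlayer({
  container: document.getElementById('dplayer')!,
  video: {
    url: 'https://example.com/live/index.m3u8', // placeholder HLS stream
    type: 'customHls',                          // route loading through customType below
    customType: {
      customHls(video: HTMLVideoElement) {
        const hls = new Hls();
        hls.loadSource(video.src);
        hls.attachMedia(video);
      },
    },
  },
});
The full player component: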
<template>
<div class="main">
<div id="dplayer" class="dplayer video-box"></div>
</div>
</template>
<script lang='ts'>
import { reactive, toRefs, onMounted } from 'vue';
// import Hls from 'hls.js' //用于播放hlv格式
// import Flv from 'flv.js' //用于播放m3u8格式
import DPlayer from 'dplayer';
export default {
name: 'VideoPlayer',
setup() {
const data = reactive({
// 这里可以定义响应式数据
});
const loadVideo = () => {
const dp = new DPlayer({
container: document.getElementById('dplayer'), // 使用正确的容器 ID
autoplay: false,//自动播放视频
live: false, //是否直播
muted: false, // 是否静音播放,默认情况下将会消除任何音频。
theme: '#b7daff',//主题颜色
loop: false,//循环播放音乐
lang: 'zh-cn',//语言,`zh'用于中文,`en'用于英语
screenshot: true,//启用截图功能
hotkey: true,//绑定热键,包括左右键和空格
preload: 'auto',//预加载的方式可以是'none''metadata''auto',默认值:'auto'
volume: 0,//初始化音量
playbackSpeed: [0.5, 1, 2, 4, 8],//播放速度
mutex: false,//互斥,阻止多个播放器同时播放,当前播放器播放时暂停其他播放器
preventClickToggle: false,//阻止点击播放器时候自动切换播放/暂停
logo: '',
video: {
pic: '../src/assets/1.png',//封面地址
url: '../src/audio/11.mp4', //视频地址
type: 'auto',
notSupportedMessage: '此视频暂无法播放,请稍后再试。', // 不支持播放的消息
// customType: {
// //自定义播放类型文件《type需要设置为'customHls'》
// customHls: function (video, player) {
// const hls = new Hls();
// hls.loadSource(video.src);
// hls.attachMedia(video);
// },
// customFlv: function (video, player) {
// //自定义播放类型文件《type需要设置为'customFlv'》
// const flvPlayer = FlvJs.createPlayer({
// type: 'flv',
// url: video.src,
// });
// flvPlayer.attachMediaElement(video);
// flvPlayer.load();
// },
// },
},
});
dp.volume(0, true, true); // 设置音量为0
dp.on('loadedmetadata', () => {
dp.play();
});
//视频流出问题时触发
dp.on('error', () => {
if (document.getElementById('dplayer')) {
let time = Number(Math.round(dp.video.currentTime + 1));
loadVideo();
dp.seek(time);
}
});
return dp; // 返回 DPlayer 实例
};
onMounted(() => {
loadVideo(); // 确保在组件挂载后加载视频
});
const refData = toRefs(data);
return {
...refData,
loadVideo,
};
},
};
</script>
<style lang="scss" scoped>
.main{
width: 500px;
height: 300px;
}
.video-box {
width: 100%;
height: 100%;
}
// 禁用视频右键菜单
:deep(.dplayer-menu),
:deep(.dplayer-mask) {
display: none !important;
}
// //隐藏底部操作栏
// :deep(.dplayer-controller) { display: none; }
// //隐藏底部操作栏动画
// :deep(.dplayer-controller-mask) { display: none; }
</style>
easy-player
Supports the various camera stream formats.
Exposes an API for starting recordings.
It is not an official npm package; you usually have to grab the source from GitHub (or another mirror) and pull it into your project by hand.
plyr
Supports HTML5 audio/video, YouTube and Vimeo embeds, and live streams (HLS, DASH, etc.).
Supports subtitles, picture-in-picture, playback-rate control, preview thumbnails, and keyboard shortcuts,
plus fullscreen playback and quality switching.
npm install plyr --save
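Quality switching, mentioned above but not used in the example, works with plain HTML5 video when each source carries a size attribute; a sketch (file paths are placeholders):
import Plyr from 'plyr';
import 'plyr/dist/plyr.css';
// Template side (placeholder files): one <source> per resolution, each with a size attribute, e.g.
//   <source src="/video-1080.mp4" type="video/mp4" size="1080" />
//   <source src="/video-720.mp4"  type="video/mp4" size="720" />
const player = new Plyr('#player', {
  quality: { default: 720, options: [1080, 720] },
});
The basic example: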
<template>
<div>
<!-- 视频播放器容器 -->
<video ref="playerRef" playsinline controls style="width:450px;height:250px">
<source src="./audio/11.mp4" type="video/mp4" />
</video>
</div>
</template>
<script setup>
import { onMounted, ref } from 'vue';
import Plyr from 'plyr';
import 'plyr/dist/plyr.css';
const playerRef = ref(null);
onMounted(() => {
if (playerRef.value) {
// 初始化 Plyr 播放器
const player = new Plyr(playerRef.value, {
controls: [
'play-large', // 大播放按钮
'play', // 播放/暂停按钮
'progress',// 进度条
'current-time',// 当前播放时间
'mute',// 静音按钮
'volume',// 音量控制
'captions',// 字幕
// 'settings',// 设置
'fullscreen',// 全屏按钮
'pip' // 开启画中画按钮
],
// 缩略图配置
previewThumbnails: {
enabled: true,
src: '' // 替换为实际的缩略图文件路径
}
});
// 监听播放事件
player.on('play', () => {
console.log('视频开始播放');
});
// 监听暂停事件
player.on('pause', () => {
console.log('视频暂停播放');
});
// 监听播放结束事件
player.on('ended', () => {
console.log('视频播放结束');
});
// 监听画中画事件
player.on('enterpictureinpicture', () => {
console.log('进入画中画模式');
});
player.on('leavepictureinpicture', () => {
console.log('离开画中画模式');
});
}
});
</script>
<style scoped>
/* 可根据需要添加自定义样式 */
</style>
NPlayer
A video player that supports arbitrary streaming media and a Bilibili-style danmaku experience.
pnpm i -S nplayer @nplayer/vue
main.ts
import { createApp } from 'vue'
import App from './App.vue'
import NPlayer from "@nplayer/vue";
const app = createApp(App)
app.use(NPlayer)
app.mount('#app')
Basic setup
<template>
<div id="app">
<div id="player-container"></div>
</div>
</template>
<script>
import { onMounted } from 'vue';
import Player from 'nplayer';
export default {
name: 'App',
setup() {
onMounted(() => {
const player = new Player({
src: '../src/audio/11.mp4'
});
player.mount('#player-container'); // 将播放器挂载到 #player-container 元素上
});
return {};
}
};
</script>
<style>
#player-container {
width: 100%;
height: 400px; /* 根据需要调整高度 */
}
</style>
Danmaku support
To add danmaku to NPlayer you need the @nplayer/danmaku plugin:
npm i -S @nplayer/danmaku
A static danmaku list, stored in
src/utils/1.ts
export default [
{
text: "口技 ",
time: 0
},
{
text: "傻袍子 ",
time: 0,
color: "#2196F3"
},
{
text: "233真的是摔啊 ",
time: 1,
color: "#2196F3"
},
{
text: "同时出土可以减少被吃的数量 ",
time: 1,
color: "#673AB7"
},
{
text: "女孩子可以养一只 ",
time: 1
},
{
text: "真正的黑恶势力 ",
time: 1,
color: "#2196F3"
},
{
text: "你 ",
time: 1,
color: "#673AB7"
},
{
text: "开学前一天的我 ",
time: 2,
color: "#E91E63"
},
{
text: "好灵活啊 ",
time: 3
},
{
text: "猞猁尾巴很短的 这个尾巴长 ",
time: 4
},
{
text: "碰到贝爷。。。 ",
time: 4
},
{
text: "啊啊啊啊啊啊啊啊啊啊啊 ",
time: 4,
color: "#673AB7"
},
{
text: "非主流发型 ",
time: 4
},
{
text: "carcass。。 ",
time: 5,
color: "#E91E63"
},
{
text: "好久沒見到老虎了啊…… ",
time: 6,
color: "#2196F3"
},
{
text: "前面说黄鸡宠物的别跑 ",
time: 6
},
{
text: "哇 大猫 ",
time: 6,
color: "#2196F3"
},
{
text: "鸳鸯戏水就是这么来的 ",
time: 7
},
{
text: "落叶这一幕,是梁思成与林徽因里面的! ",
time: 7
},
{
text: "口技了得 ",
time: 7
},
{
text: "不追求难道要坐等灭绝吗。。。 ",
time: 8
},
{
text: "爪子毛茸茸的 ",
time: 8,
color: "#E91E63"
},
{
text: "还弹起来了 ",
time: 9
},
{
text: "30厘米 ",
time: 9
},
{
text: "又被对面打野抓了 ",
time: 10,
color: "#673AB7"
}
];
Using it in a page
<template>
<div id="app">
<div id="player-container"></div>
</div>
</template>
<script>
import { onMounted } from 'vue';
import Player from 'nplayer';
import Danmaku from '@nplayer/danmaku';
import '../src/assets/styles/bilibil.css';
import items from '../src/utils/1.ts';
export default {
name: 'App',
setup() {
onMounted(() => {
// 弹幕插件配置
const danmakuPlugin = new Danmaku({
items,
autoInsert: true
});
// 弹幕插件事件
const danmakuEvent = {
apply(player) {
player.on('DanmakuSend', (opts) => {
console.log(opts);
});
player.on(player.danmaku.DANMAKU_UPDATE_OPTIONS, () => {
console.log(player.danmaku.opts);
});
}
};
// 播放器配置
const options = {
// 视频源文件路径
src: '../src/audio/11.mp4',
// 快进/快退的时间步长(秒)
seekStep: 5,
// 是否启用垂直音量条
volumeVertical: true,
// 音量条的长度
volumeBarLength: '120px',
// 是否启用封面
posterEnable: true,
// 封面图片路径
poster: '../src/audio/1.png',
// 插件列表,包含通用事件插件、弹幕插件和弹幕事件插件
plugins: [
{
// 通用事件插件,监听播放器挂载和播放事件
apply(player) {
player.on('Mounted', () => console.log('mounted'));
player.on('Play', () => console.log('play'));
}
},
danmakuPlugin, // 弹幕插件实例
danmakuEvent // 弹幕事件处理
],
// 控制栏配置,定义了控制栏中显示的按钮和组件
controls: [
['play', 'volume', 'time', 'spacer', 'danmaku-settings', 'airplay', 'settings', 'web-fullscreen', 'fullscreen'], // 主控制栏
['progress'] // 进度条控制栏
]
};
const player = new Player(options);
player.mount('#player-container');
});
return {};
}
};
</script>
<style>
#player-container {
width: 100%;
height: 400px; /* 根据需要调整高度 */
}
/* 自定义 Bilibili 主题样式 */
#player-container {
background-color: #000;
position: relative;
}
.nplayer {
background-color: #000;
}
.nplayer__controls {
background-color: rgba(0, 0, 0, 0.7);
}
.nplayer__controls__time {
color: #fff;
}
.nplayer__controls__progress {
background-color: #fff;
}
.nplayer__controls__progress__buffer {
background-color: rgba(255, 255, 255, 0.3);
}
.nplayer__controls__progress__played {
background-color: #fff;
}
.nplayer__controls__button {
color: #fff;
}
.nplayer__controls__volume__slider {
background-color: #fff;
}
.nplayer__controls__volume__slider__played {
background-color: #fff;
}
.nplayer__controls__fullscreen {
color: #fff;
}
.nplayer__poster {
background-color: #000;
}
.nplayer__poster__img {
object-fit: cover;
}
/* 弹幕样式 */
.nplayer__danmaku {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
pointer-events: none;
}
.nplayer__danmaku__item {
position: absolute;
color: #fff;
font-size: 16px;
white-space: nowrap;
}
</style>
Hikvision surveillance H5 video playback
Syncing a waveform with video playback
peaks.js
npm install --save peaks.js
npm install --save konva
npm install --save waveform-data
In the init options, you only need to point mediaElement at the right DOM element to render a waveform for either an audio or a video element.
<template>
<div>
<video ref="video" controls style="width: 800px;">
<source src="../src/audio/11.mp4" type="video/mp4">
Your browser does not support the video tag.
</video>
<div id="peak-container">
<div id="zoomview-container" ref="zoomview"></div>
<div id="overview-container" ref="overview"></div>
</div>
<!-- <div id="demo-controls">
<audio ref="audio" id="audio" controls="controls">
<source src="../public/01.wav" type="audio/mpeg" />
</audio>
</div> -->
<div>
<el-button @click="delel">销毁</el-button>
<el-button @click="play">播放/暂停</el-button>
缩略图展示的时间区间
<el-button @click="big" id="big" ref="big">收起</el-button>
<el-button @click="small" id="small" ref="small">展开</el-button>
<el-button id="seek" ref="resetPaly" @click="resetPaly">重置</el-button>
波幅调节
<el-slider
vertical
height="200px"
@input="input"
v-model="scaleValue"
id="amplitude-scale"
:format-tooltip="formatTooltip"
:step="10"
></el-slider>
<el-button @click="zoomable">显示/隐藏可缩放波形图</el-button>
</div>
</div>
</template>
<script>
import Peaks from "peaks.js";
const amplitudeScales = {
0: 0.0,
1: 0.1,
2: 0.25,
3: 0.5,
4: 0.75,
5: 1.0,
6: 1.5,
7: 2.0,
8: 2.5,
9: 3.0,
10: 3.5,
};
export default {
data() {
return {
playPause: true, //true 播放 false 暂停
peaks: "",
// 只能将其隐藏,dom元素需要保留
dispaly: false,
scaleValue: 50,
};
},
created() {
this.$nextTick(() => {
const options = {
zoomview: {
container: this.$refs.zoomview,
},
overview: {
container: this.$refs.overview,
},
// 只需要修改dom就可以展示音频或视频波形图
mediaElement: this.$refs.video,
webAudio: {
audioContext: new AudioContext(),
},
emitCueEvents: true,
// //显示轴标签
showAxisLabels: true,
};
Peaks.init(options, (err, peaks) => {
if (err) {
console.log("messge " + err);
return;
}
this.peaks = peaks;
});
});
},
methods: {
delel() {
this.peaks.destroy();
//必须将其src清空或video销毁,不然音频任可以播放
this.$refs.video.src = "";
},
play() {
if (this.playPause) {
this.peaks.player.play();
this.playPause = false;
} else {
this.peaks.player.pause();
this.playPause = true;
}
},
big() {
console.log(this.peaks);
this.peaks.zoom.zoomIn();
console.log("收起");
},
small() {
this.peaks.zoom.zoomOut();
console.log("展开");
},
resetPaly() {
this.peaks.player.seek(0.0);
console.log("重置");
// }
},
input(value) {
let scale = amplitudeScales[value / 10];
// 隐藏缩略图时候进行处理
if (this.peaks.views.getView("zoomview")) {
this.peaks.views.getView("zoomview").setAmplitudeScale(scale);
}
this.peaks.views.getView("overview").setAmplitudeScale(scale);
},
formatTooltip(val) {
return val / 10;
},
zoomable() {
let container = document.getElementById("zoomview-container");
let zoomview = this.peaks.views.getView("zoomview");
if (zoomview) {
this.peaks.views.destroyZoomview();
container.style.display = "none";
} else {
container.style.display = "block";
this.peaks.views.createZoomview(container);
}
},
},
};
</script>
<style>
#zoomview-container {
height: 300px;
}
#overview-container {
height: 300px;
}
</style>
Wavesurfer.js
<template>
<div>
<div ref="waveform" style="height: 100px;"></div>
<video
ref="video"
controls
playsinline
style="width: 100%; max-width: 600px; margin: 0 auto; display: block;">
<source :src="audioSrc" type="video/mp4">
Your browser does not support the video tag.
</video>
</div>
</template>
<script setup>
import { ref, onMounted, onBeforeUnmount } from 'vue';
import WaveSurfer from 'wavesurfer.js';
// Data
const waveform = ref(null);
const video = ref(null);
const audioSrc = new URL('../src/audio/11.mp4', import.meta.url).href;
let wavesurfer = null;
// Methods
const initWaveform = () => {
wavesurfer = WaveSurfer.create({
container: waveform.value,
waveColor: '#36b050',
progressColor: '#009688',
cursorColor: 'black',
backend: 'MediaElement', // 使用 MediaElement 后端来支持视频元素
mediaControls: true, // 如果您希望 WaveSurfer 控制媒体控件,请设为 true
media: video.value, // 直接传递视频元素
});
// 监听 ready 事件确保 WaveSurfer 已经准备好
wavesurfer.on('ready', () => {
console.log('WaveSurfer is ready');
});
// 设置同步逻辑
const syncVideoAndWaveSurfer = () => {
video.value.addEventListener('play', () => wavesurfer.play());
video.value.addEventListener('pause', () => wavesurfer.pause());
video.value.addEventListener('seeked', () => wavesurfer.seekTo(video.value.currentTime / video.value.duration));
wavesurfer.on('seek', (pos) => {
video.value.currentTime = pos * video.value.duration;
});
wavesurfer.on('finish', () => {
video.value.pause();
video.value.currentTime = 0; // 可选:回到视频开始位置
});
};
// 确保视频元数据加载完成后再绑定同步逻辑(watch 非响应式的 readyState 不会触发,这里改用事件)
if (video.value.readyState >= 1) {
syncVideoAndWaveSurfer();
} else {
video.value.addEventListener('loadedmetadata', syncVideoAndWaveSurfer, { once: true });
}
wavesurfer.load(audioSrc); // 加载音频文件
};
// Lifecycle hooks
onMounted(() => {
initWaveform();
});
onBeforeUnmount(() => {
if (wavesurfer) {
wavesurfer.destroy(); // 清理 WaveSurfer 实例
}
// video 元素会随组件卸载一并移除,其上的监听器也随之回收,无需手动 removeEventListener
});
</script>
<style scoped>
/* 在这里添加样式 */
</style>