Preface:
An ECG is recorded on paper covered with small and large squares, so the first step in reading one is knowing what those squares mean. The thin vertical lines are 1 mm apart, and so are the thin horizontal lines; together they form small 1 mm × 1 mm squares. Every fifth line is a thick line, so thick lines are 5 mm apart, and the thick horizontal and vertical lines form the large squares.
ECG paper moves at the internationally standardized speed of 25 mm/s, so each small square along the horizontal axis represents 0.04 s, and the distance between two thick lines represents 0.2 s.
The calibration voltage is standardized as well: a 1 mV calibration signal must deflect the baseline by exactly 10 small squares. In other words, each small square along the vertical axis represents 0.1 mV, each large square represents 0.5 mV, and two large squares represent 1 mV.
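Those two standards (25 mm/s paper speed, 10 mm per mV) are all the later code relies on. Below is a minimal, illustrative Java sketch of that arithmetic; the class and method names are hypothetical and not part of the project:
// Illustrative only: the paper-grid arithmetic from the paragraph above.
public final class EcgGridScale {
public static final float PAPER_SPEED_MM_PER_S = 25f;// standard paper speed
public static final float MM_PER_MV = 10f;// 1 mV deflects the trace by 10 mm
// One small (1 mm) square along the time axis: 1 / 25 = 0.04 s
public static float secondsPerSmallSquare() {
return 1f / PAPER_SPEED_MM_PER_S;
}
// One small (1 mm) square along the voltage axis: 1 / 10 = 0.1 mV
public static float millivoltsPerSmallSquare() {
return 1f / MM_PER_MV;
}
}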
Characteristics:
- Fixed paper speed: 25 mm/s
- The amount of data displayed on one screen is not fixed (it depends on the view size);
- The ECG conventions above are applied to heart-sound (PCG) data here, so the sample rate is 8000 Hz; the screen-capacity math this implies is sketched right after this list.
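Combining the paper conventions with the 8000 Hz sample rate gives the screen-capacity math that both implementations perform (surfaceChanged() on Android, initParams() in Vue). A worked sketch, borrowing the 1140 × 300 px canvas size from the Vue example below and the decimation factor of 50 (FILTER_SIZE / STEP_SIZE):
// Worked example of the screen-capacity math (canvas size borrowed from the Vue example below).
public class ScreenCapacityDemo {
public static void main(String[] args) {
int widthPx = 1140, heightPx = 300;
int verticalMm = 25;// 25 small (1 mm) squares stacked vertically
double zoom = (double) heightPx / verticalMm;// pixels per mm -> 12 px/mm
double screenSeconds = widthPx / zoom * 0.04;// 25 mm/s => 0.04 s per mm => 3.8 s per screen
int effectiveRate = 8000 / 50;// raw 8 kHz decimated by 50 -> 160 points per second
double pointsPerScreen = effectiveRate * screenSeconds;// 608 points per screen
float gapX = (float) (widthPx / pointsPerScreen);// ~1.875 px between neighbouring points
System.out.println("seconds/screen=" + screenSeconds + ", points/screen=" + pointsPerScreen + ", gapX=" + gapX);
}
}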
1. Android implementation:
Grid view:
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.View;
// Background view that draws the ECG-style millimeter grid
public class GridView extends View {
private int width, height;
private int row = 5;
private Paint paint;
public GridView(Context context) {
this(context, null);
}
public GridView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public GridView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
paint = new Paint();
paint.setAntiAlias(true);
paint.setStyle(Paint.Style.FILL);
}
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
// super.onLayout(changed, left, top, right, bottom);
width = getWidth();
height = getHeight();
}
protected void onDraw(Canvas canvas) {
// super.onDraw(canvas);
// Thin lines: the small 1 mm squares (5 per big square in each direction)
paint.setStrokeWidth(2);
drawGrid(canvas, row * 5, Color.parseColor("#D7D7D7"));
// Thick lines: the big 5 mm squares
paint.setStrokeWidth(4);
drawGrid(canvas, row, Color.parseColor("#D7D7D7"));
}
private void drawGrid(Canvas canvas, int cols, int color) {
paint.setColor(color);
// Same spacing in both directions so the cells stay square
float rowSpace = height * 1f / cols;
// Vertical lines
for (int i = 0; i * rowSpace <= width; i++) {
canvas.drawLine(i * rowSpace, 0, i * rowSpace, height, paint);
}
// Horizontal lines
for (int i = 0; i <= cols; i++) {
canvas.drawLine(0, i * rowSpace, width, i * rowSpace, paint);
}
}
// @Override
// protected void onSizeChanged(int w, int h, int oldw, int oldh) {
// super.onSizeChanged(w, h, oldw, oldh);
// width = w;
// height = h;
// invalidate();
// }
}
Waveform view:
package com.kl.analyze.view.wave;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.PorterDuff;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.View;
import androidx.annotation.NonNull;
import androidx.core.content.ContextCompat;
import com.kl.common.base.BaseApp;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
// SurfaceView that scrolls the decimated waveform in real time (transparent, drawn over the grid)
public class AudioWaveView extends SurfaceView implements Callback, Runnable, Serializable {
private final List<Short> pointList = Collections.synchronizedList(new LinkedList<>());
private Paint mPaint;
private SurfaceHolder mHolder;
private int mWidth = 0;
private int mCenterY = 0;
// waveform color
protected int mWaveColor = com.kl.common.R.color.colorPrimary;
public float[] points;
private boolean isRunning = false;
private boolean isDraw = false;
List<Short> nativeDatas = null;
volatile ThreadPoolExecutor singleThreadExecutor;// single-thread pool that feeds pointList
private double zoom;// pixels per millimeter (px/mm)
private float gapX = 0.2f;// horizontal pixels per plotted point (px)
private int xSize = 0;// number of points that can be drawn across the screen
private final int FILTER_SIZE = 50;// the raw sample rate is too high for display, so keep 1 of every 50 samples
// Render loop: redraws a frame whenever new points have been queued (isDraw flag)
public void run() {
while (isRunning) {
if (isDraw) {
drawFrame();
} else {
Thread.yield();
}
}
}
// Called from the data/recording thread with raw PCM samples
public void addWaveData(short[] waveDataArray) {
if (!isRunning || waveDataArray == null) {
return;
}
if (nativeDatas == null) {
nativeDatas = new ArrayList<>();
}
for (short waveData : waveDataArray) {
int maxMidScopeY = 0;// ADC output range: -32768 ~ 32767, midpoint 0
int gain = 5;// waveform gain: 5 mm/mV
// Map the sample to a screen Y: voltage * gain (mm/mV) * zoom (px/mm), centered vertically
short y = (short) Math.floor(calcRealMv(maxMidScopeY - waveData) * gain * zoom + mCenterY);
nativeDatas.add(y);
}
// Hand the data off in batches of 800 samples (0.1 s at 8 kHz)
if (nativeDatas.size() >= 800) {
addPointThreadExecutor(nativeDatas);
nativeDatas = new ArrayList<>();
}
}
private void addPointThreadExecutor(List<Short> nativeDatas) {
if (!isRunning || nativeDatas == null) {
return;
}
if (singleThreadExecutor == null || singleThreadExecutor.isShutdown()) {
startSingleThreadExecutor();
return;
}
//Log.e("====>", "singleThreadExecutor.getQueue().size() = " + singleThreadExecutor.getQueue().size());
if (singleThreadExecutor.getQueue().size() >= 5) {
Log.e("====>", "singleThreadExecutor.getQueue().size() = " + singleThreadExecutor.getQueue().size());
return;
}
singleThreadExecutor.execute(new Runnable() {
public void run() {
// Log.d("caowj", "单线程执行");
List<Short> dataList = nativeDatas;
synchronized (pointList) {
for (int i = 0; i < dataList.size(); i += FILTER_SIZE) {
if (pointList.size() >= xSize && xSize > 0) {
pointList.remove(0);
}
pointList.add(dataList.get(i));
}
isDraw = true;
}
}
});
}
// Lock the SurfaceHolder canvas, draw one frame, then unlock and post it
synchronized void drawFrame() {
Canvas canvas = null;
try {
if (!isRunning) {
return;
}
canvas = mHolder.lockCanvas();
if (canvas != null) {
drawCube(canvas);
} else {
Log.w("caowj", "canvas is null");
}
} catch (Exception e) {
isDraw = false;
Log.w("caowj", "draw exception: " + e.getMessage());
} finally {
if (canvas != null)
try {
mHolder.unlockCanvasAndPost(canvas);
} catch (Exception e) {
e.printStackTrace();
}
}
}
public AudioWaveView(Context context) {
this(context, null);
}
public AudioWaveView(Context context, AttributeSet attributeSet) {
super(context, attributeSet);
initView();
}
private void initView() {
Log.d("caowj", "initView addCallback");
if (mHolder == null) {
mHolder = getHolder();
mHolder.addCallback(this);
// Log.d("caowj", "initView addCallback");
setZOrderOnTop(true);// keep the surface on top with a transparent background so the grid shows through
mHolder.setFormat(PixelFormat.TRANSLUCENT);
}
Rect frame = mHolder.getSurfaceFrame();
mCenterY = frame.centerY();
mWidth = frame.width();
// Log.d("caowj", "initView mCenterY=" + xSize);
if (mPaint == null) {
mPaint = new Paint();
mPaint.setColor(ContextCompat.getColor(BaseApp.instance, mWaveColor));
mPaint.setAntiAlias(true);
mPaint.setStrokeWidth(2);
mPaint.setStrokeCap(Paint.Cap.ROUND);
mPaint.setStyle(Paint.Style.STROKE);
}
}
public void drawCube(Canvas canvas) {
canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
int len = pointList.size();
if (len >= 2) {
// Right-align the data so the newest sample ends at the right edge of the screen
int index = xSize - len;
// points packs line segments as [x1, y1, x2, y2] quadruples for drawLines()
for (int i = index + 1; i < xSize; i++) {
float startX = (i - 1) * gapX;
int mIndex = i - index - 1;
points[i * 4] = startX;
points[i * 4 + 1] = pointList.get(mIndex);
points[i * 4 + 2] = i * gapX;
points[i * 4 + 3] = pointList.get(i - index);
}
}
mPaint.setColor(ContextCompat.getColor(BaseApp.instance, mWaveColor));
canvas.drawLines(points, mPaint);
isDraw = false;
}
protected void onVisibilityChanged(@NonNull View changedView, int visibility) {
super.onVisibilityChanged(changedView, visibility);
// Log.d("caowj", "onVisibilityChanged = " + visibility);
if (visibility == VISIBLE) {
isRunning = true;
} else if (visibility == INVISIBLE) {
isRunning = false;
}
}
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
mWidth = w;
mCenterY = h / 2;
// Log.w("caowj", "onSizeChanged mCenterY=" + mCenterY + ",mWidth=" + mWidth);
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.d("caowj", "surfaceChanged");
this.mWidth = width;
mCenterY = height / 2;
int maxMillimeter = 25;// total vertical extent in mm (each small square is 1 mm, so 25 squares = 25 mm)
zoom = (double) height / maxMillimeter;// pixels per millimeter (px/mm)
double screenTotalTime = width / zoom * 0.04; // time shown by one full screen (s); paper speed 25 mm/s => 0.04 s per mm
int sampleRate = 8000 / FILTER_SIZE;// raw sample rate is 8 kHz; this is the effective rate after decimation
gapX = (float) (this.mWidth / (sampleRate * screenTotalTime));// horizontal pixels between consecutive points
xSize = Math.round(this.mWidth / gapX);// number of points that fit on one screen
points = new float[xSize * 4];
Log.e("caowj", "live waveform: zoom=" + zoom + ",screenTotalTime=" + screenTotalTime + ",width=" + width + ",xSize=" + xSize + ",gapX=" + gapX);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.w("caowj", "surfaceCreated");
initView();
startDrawing();
}
private void startDrawing() {
isRunning = true;
// Start the render-loop thread (run()) and the single-thread executor that feeds pointList
Thread thread = new Thread(this);
thread.start();
startSingleThreadExecutor();
}
public void surfaceDestroyed(SurfaceHolder holder) {
isRunning = false;
Log.w("caowj", "surfaceDestroyed");
}
private float calcRealMv(int point) {
// Convert the raw ADC value to a voltage (3.3 V reference, 16-bit full scale)
return (float) (point * 3.3 / 32767);
// int magnification = 1000;//TODO amplification factor
// return (float) (point / magnification * 3.3 / 32767 * 1000);
}
private void startSingleThreadExecutor() {
// (Re)create the single-thread pool; the bounded queue holds at most 10 pending batches
if (singleThreadExecutor != null && !singleThreadExecutor.isShutdown()) {
singleThreadExecutor.shutdownNow();
}
singleThreadExecutor = new ThreadPoolExecutor(1, 1,
0L, TimeUnit.MILLISECONDS,
new ArrayBlockingQueue<>(10));
}
public void clearDatas() {
if (pointList != null) {
pointList.clear();
}
if (nativeDatas != null) {
nativeDatas.clear();
}
startSingleThreadExecutor();
isDraw = true;
points = new float[xSize * 4];
drawFrame();
}
}
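Since AudioWaveView calls setZOrderOnTop(true) and uses a TRANSLUCENT surface, it is meant to be layered on top of the static GridView. The snippet below is a minimal sketch of one way to wire the two together; the FrameLayout-based Activity (WaveDemoActivity, onPcmChunk) is a hypothetical example, not part of the original project.
// Hypothetical wiring (not from the original project): stack the transparent
// AudioWaveView over the GridView and feed it PCM chunks as they arrive.
public class WaveDemoActivity extends android.app.Activity {
private AudioWaveView waveView;
@Override
protected void onCreate(android.os.Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
android.widget.FrameLayout root = new android.widget.FrameLayout(this);
root.addView(new GridView(this));// millimeter grid at the bottom
waveView = new AudioWaveView(this);// transparent SurfaceView on top
root.addView(waveView);
setContentView(root);
}
// Called from wherever the audio samples come from (e.g. an AudioRecord read loop)
void onPcmChunk(short[] chunk) {
waveView.addWaveData(chunk);
}
}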
2. Vue implementation:
<template>
<div>
<canvas ref="canvas" width="1140" height="300"></canvas>
<div>
<audio ref="audioPlayer" controls ="startPlay" controlsList="nodownload noplaybackrate"></audio>
</div>
</div>
</template>
<script>
import axios from "axios";
export default {
name: "audioWave",
props: ['audioUrl'],
data() {
return {
// audioUrl: "http://121.41.225.74:9091/mintti/app/storage/newFile/c19xqqqtd8ywqyaf8gno.wav",
dataArray: [],
isPlaying: false,
interval: 100, // refresh interval for drawing (ms)
fileData: new Int8Array(0),
index: 0,
mWidth: 0,
mHeight: 0,
audio: null,
animationId: null,
lastTime: 0,
zoom: 0, // pixels per millimeter (px/mm)
gapX: 0.2, // horizontal pixels per plotted point (px)
xSize: 0, // number of points that fit across the canvas
maxMillimeter: 5 * 5, // 25 small (1 mm) squares vertically
STEP_SIZE: 50, // decimation factor: keep 1 of every 50 samples
gain: 5, // waveform gain: 5 mm/mV
maxMidScopeY: 0, // ADC midpoint (range -32768 ~ 32767)
}
},
watch: {
audioUrl(newVal) {
// Re-download whenever the audioUrl prop changes
this.downloadAudio()
}
},
mounted() {
const ctx = this.$refs.canvas.getContext('2d')
this.drawBg(ctx)
this.audio = this.$refs.audioPlayer
this.initParams()
},
methods: {
initParams() {
// Same screen-capacity math as the Android surfaceChanged()
this.zoom = this.mHeight / this.maxMillimeter // px per mm
let screenTotalTime = this.mWidth / this.zoom * 0.04 // seconds per screen at 25 mm/s (0.04 s per mm)
this.gapX = this.mWidth / (8000 / this.STEP_SIZE * screenTotalTime) // px between points after decimation
this.xSize = Math.round(this.mWidth / this.gapX) // points per screen
},
// Download the audio file as an ArrayBuffer
downloadAudio() {
console.log("audio url: " + this.audioUrl)
axios({
method: 'get',
url: this.audioUrl,
responseType: 'arraybuffer'
}).then(res => {
if (!res) {
return;
}
console.log("音频下载成功")
const responseData = res.data.slice(0)
this.initAudioPlayer(res.data)
this.dataArray = []
this.index = 0
this.fileData = new Int8Array(responseData);
}).catch(error => {
console.error('error downloading audio:', error);
});
},
initAudioPlayer(audioData) {
// Convert the ArrayBuffer to a Uint8Array
const uint8Array = new Uint8Array(audioData);
// Create a Blob
const blob = new Blob([uint8Array], { type: 'audio/wav' });
// Create an object URL for the audio
const audioURL = URL.createObjectURL(blob);
// Get the <audio> element
const audioElement = this.$refs.audioPlayer;
this.audio = audioElement
// Set the <audio> element's src
audioElement.src = audioURL;
audioElement.addEventListener('canplaythrough', () => {
// console.log('audio fully loaded, can play through')
})
// 1. play: fired when playback starts.
audioElement.addEventListener('play', (e) => {
console.log("audio is playing, isPlaying=" + this.isPlaying);
})
// 2. pause: fired when playback pauses.
audioElement.addEventListener('pause', (e) => {
this.isPlaying = false
console.log("audio paused: " + e);
})
// 3. seeked: fired when the user has jumped to a new position.
audioElement.addEventListener('seeked', (e) => {
// Each index step corresponds to 100 ms of audio (1600 bytes at 8 kHz, 16-bit mono)
this.index = Math.round((this.audio.currentTime * 1000) / 100)
this.dataArray = []
console.log("seeked to a new position, index=" + this.index);
})
// 4. ended: fired when playback finishes.
audioElement.addEventListener('ended', () => {
this.isPlaying = false
console.log('playback finished')
this.stopPlayback()
})
},
startPlay() {
if (this.audio && this.fileData.byteLength > 0) {
// Play the audio and start the drawing timer
this.audio.play();
this.isPlaying = true
this.timer()
}
},
timer() {
let curTime = new Date().getTime();
// console.log("播放状态:" + this.isPlaying)
// if (this.audio.currentTime > 1) {
// this.audio.pause()
// return
// }
if ((curTime - this.lastTime) >= this.interval && this.isPlaying) {
// console.log("定时器执行了,耗时:" + (curTime - this.lastTime) + " 播放位置:" + this.audio.currentTime);
this.lastTime = curTime;
// 执行数据刷新操作
this.refreshData();
}
this.animationId = window.requestAnimationFrame(() => {
this.timer()
});
},
refreshData() {
let i = this.index
// console.log("文件总长度:" + this.fileData.byteLength + ",,i=" + i)
if (i * 1600 + 44 > this.fileData.byteLength) {
this.stopAnimation()
return
}
const byteArray = this.fileData.slice(i * 1600 + 44, (i + 1) * 1600 + 44);
// console.log("文件读取范围:" + (i * 1600 + 44) + "----" + ((i + 1) * 1600 + 44) + "" + this.fileData.byteLength)
// 创建一个新的 Uint16Array,长度为 byteArray 的一半
let shortArray = new Int16Array(byteArray.length / 2)
//遍历 byteArray,将每两个字节合并成一个短整型
for (let i = 0; i < byteArray.length; i += 2) {
shortArray[i / 2] = (byteArray[i] & 0xFF) | (byteArray[i + 1] & 0xFF) << 8;
}
// Decimate: keep 1 of every STEP_SIZE samples; drop the oldest point once the canvas is full
for (let i = 0; i < shortArray.length; i += this.STEP_SIZE) {
// console.log(i + " short value: " + shortArray[i])
if (this.dataArray.length >= this.xSize && this.xSize > 0) {
this.dataArray.shift()
}
this.dataArray.push(shortArray[i])
}
this.drawAll();
this.index += 1;
},
stopPlayback() {
console.log("停止播放-stopPlayback")
this.isPlaying = false
this.stopAnimation()
const ctx = this.$refs.canvas.getContext('2d')
ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height)
this.drawBg(ctx)
},
stopAnimation() {
if (this.animationId) {
cancelAnimationFrame(this.animationId);
this.animationId = null; // clear the reference
}
},
drawAll() {
if (!this.isPlaying) {
return
}
const ctx = this.$refs.canvas.getContext('2d')
ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height)
this.drawBg(ctx)
this.drawWaveform(ctx)
},
drawBg(ctx) {
// Thin lines: the small 1 mm squares
ctx.lineWidth = 1;
this.drawGrid(ctx, this.maxMillimeter);
// Thick lines: the big 5 mm squares
ctx.lineWidth = 2
this.drawGrid(ctx, this.maxMillimeter / 5)
},
drawGrid(ctx, cols) {
const { width, height } = ctx.canvas
this.mWidth = ctx.canvas.width
this.mHeight = ctx.canvas.height
ctx.strokeStyle = '#ccc'
const rowSpace = height / cols;
// Vertical lines
for (let i = 0; i * rowSpace <= width; i++) {
ctx.beginPath();
ctx.moveTo(i * rowSpace, 0);
ctx.lineTo(i * rowSpace, height);
ctx.stroke();
}
// Horizontal lines
for (let i = 0; i <= cols; i++) {
ctx.beginPath();
ctx.moveTo(0, i * rowSpace);
ctx.lineTo(width, i * rowSpace);
ctx.stroke();
}
},
drawWaveform(ctx) {
ctx.beginPath()
ctx.lineWidth = 1
ctx.strokeStyle = '#25ebd7'
let x = 0
let len = this.dataArray.length;
// Right-align the data so the newest sample sits at the right edge of the canvas
let index = this.xSize - len;
for (let i = index + 1; i < this.xSize; i++) {
const mCenterY = this.mHeight / 2;
// Map the sample to a canvas Y: voltage * gain (mm/mV) * zoom (px/mm), centered vertically
const y = Math.floor(this.calcRealMv(this.maxMidScopeY - this.dataArray[i - index - 1]) * this.gain * this.zoom + mCenterY);
// console.log(`i=${i},position=${i - index - 1},data=${this.dataArray[i - index - 1]},y=${y},mCenterY=${mCenterY}`)
x = (i - 1) * this.gapX;
ctx.lineTo(x, y)
}
// Stroke the whole path once rather than on every segment
ctx.stroke()
},
calcRealMv(point) {
// Convert the raw 16-bit sample to a voltage (3.3 V reference, 16-bit full scale)
return point * 3.3 / 32767;
}
}
}
</script>