基本原理
使用摄像头计算人体心率的方法达不到专业设备那样准确,只能作为娱乐参考使用。
功能实现原理是:用手指覆盖相机摄像头,逐帧分析摄像头返回图像中红色像素的变化,判断是否发生了一次心跳脉冲——如果分析发现明显的波动,则记为一次心跳。为了保证能捕捉到清晰的红色像素变化,在扫描摄像头画面的同时需要打开相机闪光灯(手电筒模式)。
心率变异性的计算方案是:保存每次心跳之间的时间间隔(RR 间期)的集合,最后对该集合计算方差和标准差。
接下来直接贴上完整代码,下面代码中使用的摄像头控件为CameraView,接入方法自行查看。动态申请相机权限自行完成。
//build.gradle中导入cameraview控件
implementation 'com.otaliastudios:cameraview:2.7.2'
//Manifest.xml中申请相机权限
<uses-permission android:name="android.permission.CAMERA" />
override fun onCreate(savedInstanceState: Bundle?) {
...
// Bind the camera preview to this activity's lifecycle (auto open/close).
binding.vwCamera.setLifecycleOwner(this)
binding.vwCamera.addCameraListener(object : CameraListener() {
})
binding.vwCamera.addFrameProcessor {
// Per-frame callback: check whether a finger covers the lens, time the
// pulses, and compute the heart rate.
handleFrameCamera(it)
}
}
// Guards against re-entrant frame analysis: true while a frame is being processed.
private val processing = AtomicBoolean(false)
// Write cursor into averageArray (wraps at averageArraySize).
private var averageIndex = 0
private val averageArraySize = 4
// Rolling window of the red-channel averages of the last 4 frames.
private val averageArray = IntArray(averageArraySize)
// Default detection state (GREEN = at rest, RED = beat just detected).
private var currentType = TYPE.GREEN
/**
 * Detection state: RED while a pulse trough is being registered,
 * GREEN while the signal is at rest.
 * @author liuyazhuang
 */
enum class TYPE {
GREEN, RED
}
// Exposes the current detection state.
fun getCurrent(): TYPE = currentType
// BPM samples, one per 2-second measurement window (up to 15).
private val beatsArray = arrayListOf<Int>()
// Beats counted inside the current 2-second window.
private var beats = 0f
// Total beats over the whole measurement.
private var allBeats = 0
private var flag = 1.0
// Start time of the current measurement window (ms since epoch).
private var startTime: Long = 0
// RR intervals (ms between consecutive beats), used for the HRV computation.
private var beatBeanTimeList = arrayListOf<Long>()
// Timestamp of the previous detected beat; 0 means "no previous beat".
private var beatBeanTimeStart = 0L
// Points currently drawn on the waveform view.
private val vwLinePos = mutableListOf<Float>()
// Fixed waveform shape played back over the six frames after a beat.
private val vwLinePosValue = mutableListOf(20f, 0f, -20f, 10f, -10f, 0f)
/**
 * Processes a single preview frame (CameraView invokes this on a worker thread).
 *
 * Pipeline: average the red channel of the frame; treat a drop below the
 * rolling 4-frame average as one heartbeat; every 2 seconds convert the beat
 * count into BPM; after 15 valid windows stop the camera and compute the
 * heart-rate variability (standard deviation of RR intervals).
 */
private fun handleFrameCamera(frame: Frame) {
    val size: Size = frame.size
    if (frame.dataClass === ByteArray::class.java) {
        val data: ByteArray = frame.getData()
        // Drop this frame if the previous one is still being analysed
        // (released via processing.set(false) on every exit path below).
        if (!processing.compareAndSet(false, true)) return
        val width: Int = size.width
        val height: Int = size.height
        // NOTE(review): height/width are passed swapped relative to the
        // decodeYUV420SPtoRedAvg(buf, width, height) signature — presumably
        // deliberate for the sensor's rotated buffer; verify on-device.
        val imgAvg: Int = decodeYUV420SPtoRedAvg(data.clone(), height, width)
        // An average red value below 200 means no finger is covering the lens.
        if (imgAvg < 200) {
            runOnUiThread {
                stopScanView()
                binding.tvTip1.text = "No finger detected!"
            }
        } else
            runOnUiThread {
                startScanView()
                binding.tvTip1.text =
                    "Hold your finger and don't move it."
            }
        if (imgAvg == 0 || imgAvg == 255) {
            // 0 or 255 is a degenerate frame: abort, release the guard, and
            // discard the pending RR measurement (finger probably moved).
            processing.set(false)
            beatBeanTimeStart = 0L
            return
        }
        // Sum and count of the non-zero red averages of the last 4 frames.
        var averageArrayAvg = 0
        var averageArrayCnt = 0
        for (i in averageArray.indices) {
            if (averageArray[i] > 0) {
                averageArrayAvg += averageArray[i]
                averageArrayCnt++
            }
        }
        // Rolling mean of the last 4 frames' red averages (0 until warmed up).
        val rollingAverage = if (averageArrayCnt > 0) averageArrayAvg / averageArrayCnt else 0
        if (rollingAverage == 0 && imgAvg > 200) {
            // First valid frame of a measurement: start the 2-second window.
            startTime = System.currentTimeMillis()
            runOnUiThread {
                binding.progressBar.setProgressWithAnimation(1 * 100 / 15f, 2000)
            }
        }
        var newType: TYPE = currentType
        // A red value below the rolling average marks the trough of a pulse.
        if (imgAvg in 201 until rollingAverage) {
            newType = TYPE.RED
            if (newType != currentType) {
                beats++
                allBeats++
                flag = 0.0
                // One heartbeat detected: record the interval since the
                // previous beat for the HRV computation below.
                if (beatBeanTimeStart == 0L) {
                    // First beat: just remember its timestamp.
                    beatBeanTimeStart = System.currentTimeMillis()
                } else {
                    val nowTime = System.currentTimeMillis()
                    val rrTime = nowTime - beatBeanTimeStart
                    // Keep only plausible intervals (~43-150 BPM).
                    if (rrTime in 400..1400)
                        beatBeanTimeList.add(rrTime)
                    beatBeanTimeStart = nowTime
                }
                vwLinePos.clear()
                runOnUiThread {
                    vwLinePos.add(20f)
                    binding.vwLine.showLine(vwLinePos.last())
                }
            }
        } else {
            // Render the beat over six frames, then return the trace to flat.
            newType = TYPE.GREEN
            runOnUiThread {
                if (vwLinePos.size < 6 && allBeats > 0) {
                    vwLinePos.add(vwLinePosValue[vwLinePos.size])
                    binding.vwLine.showLine(vwLinePos.last())
                } else {
                    vwLinePos.add(0f)
                    binding.vwLine.showLine(vwLinePos.last())
                }
            }
        }
        // Store this frame's red average in the 4-slot rolling window.
        if (averageIndex == averageArraySize) averageIndex = 0
        averageArray[averageIndex] = imgAvg
        averageIndex++
        // Transitioned from one state to another.
        if (newType !== currentType) {
            currentType = newType
        }
        // Elapsed time since the window started (seconds).
        val endTime = System.currentTimeMillis()
        val totalTimeInSecs: Float = (endTime - startTime) / 1000f
        if (totalTimeInSecs >= 2) { // evaluate once every 2 seconds
            // beats per second -> beats per minute for this window
            val bps: Float = beats / totalTimeInSecs
            val dpm = (bps * 60.0).toInt()
            LogUtils.e(
                "time:$totalTimeInSecs",
                "2秒内的心跳次数:$beats",
                "每秒心跳次数:$bps",
                "心率:$dpm"
            )
            if (dpm < 30 || dpm > 180 || imgAvg < 200) {
                // Implausible rate, or the finger was lifted: restart the window.
                startTime = System.currentTimeMillis()
                beatBeanTimeStart = startTime
                beats = 0f
                processing.set(false)
                return
            }
            // Store the valid BPM sample and show the running average.
            beatsArray.add(dpm)
            var beatsArrayAvg = 0
            var beatsArrayCnt = 0
            for (i in beatsArray) {
                if (i > 0) {
                    beatsArrayAvg += i
                    beatsArrayCnt++
                }
            }
            val beatsAvg = beatsArrayAvg / beatsArrayCnt
            runOnUiThread {
                if (beatsArray.size < 15)
                    binding.progressBar.setProgressWithAnimation(
                        (beatsArray.size + 1) * 100 / 15f,
                        2000
                    )
                binding.tvBpm.text = beatsAvg.toString()
            }
            startTime = System.currentTimeMillis()
            beats = 0f
            // 15 windows (~30 s) collected: stop scanning and produce the result.
            if (beatsArray.size == 15) {
                binding.vwCamera.close()
                // Each window is 2 s, so doubling the total beat count
                // approximates beats per minute.
                val bpm = allBeats * 2
                // HRV from the recorded RR intervals.
                // Fix: the original referenced an undefined `hrvTimeList`;
                // the intervals are stored in beatBeanTimeList. The size
                // guard also prevents a division by zero after the min and
                // max samples are dropped.
                beatBeanTimeList.takeIf { it.size > 2 }?.let {
                    var min = 0L
                    var max = 0L
                    for (time in it) {
                        if (min == 0L || time < min) min = time
                        if (max == 0L || time > max) max = time
                    }
                    // Drop one minimum and one maximum to reduce outlier error.
                    it.remove(min)
                    it.remove(max)
                    // Fix: the mean must be taken over the remaining values;
                    // the original summed before removing min/max but divided
                    // by the reduced size.
                    var allTime = 0L
                    for (time in it) allTime += time
                    val timeAvg = allTime / it.size
                    var squaredDiffSum = 0f
                    for (time in it) {
                        squaredDiffSum += (time - timeAvg) * (time - timeAvg)
                    }
                    val hrv = sqrt(squaredDiffSum / it.size).toInt() // heart-rate variability
                    // NOTE(review): bpm and hrv are computed but not consumed
                    // here — presumably handed to the result screen; confirm.
                }
            }
        }
        processing.set(false)
    } else if (frame.dataClass === Image::class.java) {
        LogUtils.e("相机", "camera2帧数据返回")
        val data: Image = frame.getData()
        // Process android.media.Image...
    }
}
/**
 * Sums the red channel (0..255) over every pixel of an NV21 / YUV420SP frame.
 *
 * Uses the standard fixed-point YUV->RGB conversion, but only the red channel
 * is consumed, so the original's green/blue math and ARGB packing were dead
 * work in a hot per-frame loop and have been dropped: for the clamped 18-bit
 * value `r`, packing into ARGB and re-extracting the red byte is exactly
 * `r shr 10`.
 *
 * @param yuv420sp raw NV21 buffer (Y plane followed by interleaved VU pairs); null returns 0
 * @param width frame width in pixels
 * @param height frame height in pixels
 * @return sum of the red values of all width*height pixels
 */
fun decodeYUV420SPtoRedSum(yuv420sp: ByteArray?, width: Int, height: Int): Int {
    if (yuv420sp == null) return 0
    val frameSize = width * height
    var sum = 0
    var yp = 0
    for (j in 0 until height) {
        // One row of VU pairs serves two rows of luma (2x2 chroma subsampling).
        var uvp = frameSize + (j shr 1) * width
        var v = 0
        for (i in 0 until width) {
            var y = (0xff and yuv420sp[yp].toInt()) - 16
            if (y < 0) y = 0
            if (i and 1 == 0) {
                v = (0xff and yuv420sp[uvp++].toInt()) - 128
                uvp++ // skip the U byte — not needed for the red channel
            }
            var r = 1192 * y + 1634 * v
            if (r < 0) r = 0 else if (r > 262143) r = 262143
            sum += r shr 10 // equivalent to ARGB pack + red extraction
            yp++
        }
    }
    return sum
}
/**
 * Average red value (0..255) of an NV21 / YUV420SP frame.
 *
 * @param yuv420sp raw NV21 buffer; null returns 0
 * @param width frame width in pixels
 * @param height frame height in pixels
 * @return the red sum divided by the pixel count, or 0 for a null buffer
 *         or a zero-area frame
 */
fun decodeYUV420SPtoRedAvg(
    yuv420sp: ByteArray?, width: Int,
    height: Int
): Int {
    if (yuv420sp == null) return 0
    val frameSize = width * height
    // Fix: guard against ArithmeticException (divide by zero) when either
    // dimension is 0.
    if (frameSize <= 0) return 0
    return decodeYUV420SPtoRedSum(yuv420sp, width, height) / frameSize
}