Writing this code cost some brain cells: optimizing from opening the encoder for every encode to opening it only once.
Prerequisite: the FFmpeg dynamic libraries have already been built for each target platform.
Across Android x86_64, x86, arm64-v8a, and armeabi-v7a, the FFmpeg encoding flow is essentially the same. The differences lie in memory allocation and address-taking; get the allocation wrong and the app crashes outright.
First, let's look at the generic encoder code, covering encoder creation, encoding, and encoder release.
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Drawing;
namespace FFmpegAnalyzer
{
/// <summary>
/// Encoder
/// </summary>
internal unsafe class FFmpegEncoder
{
/// <param name="srcFrameSize">编码前一帧原始数据的大小</param>
/// <param name="isRgb">rgb数据</param>
/// <param name="detFrameSize">编码后一帧目标数据的大小</param>
public FFmpegEncoder(Size srcFrameSize, bool isRgb, Size detFrameSize)
{
_srcFrameSize = srcFrameSize;
_isRgb = isRgb;
_detFrameSize = detFrameSize == default ? _srcFrameSize : detFrameSize;
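// YUV420P subsamples chroma 2x2, so both dimensions must be even; odd sizes are rounded down below.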
_detFrameSize.Width = (_detFrameSize.Width % 2 == 0) ? _detFrameSize.Width : _detFrameSize.Width - 1;
_detFrameSize.Height = (_detFrameSize.Height % 2 == 0) ? _detFrameSize.Height : _detFrameSize.Height - 1;
}
/// <summary>
/// Create the encoder
/// </summary>
public void CreateEncoder(AVCodecID codecFormat)
{
var originPixelFormat = _isRgb ? AVPixelFormat.AV_PIX_FMT_RGB24 : AVPixelFormat.AV_PIX_FMT_BGRA;
var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
_pCodec = FFmpeg.avcodec_find_encoder(codecFormat);
if (_pCodec == null)
throw new InvalidOperationException("Codec not found.");
_pCodecContext = FFmpeg.avcodec_alloc_context3(_pCodec);
_pCodecContext->width = _detFrameSize.Width;
_pCodecContext->height = _detFrameSize.Height;
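// 30 fps; with gop_size = 30 this requests roughly one keyframe per second.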
_pCodecContext->framerate = new AVRational { num = 30, den = 1 };
_pCodecContext->time_base = new AVRational {num = 1, den = 30};
_pCodecContext->gop_size = 30;
_pCodecContext->pix_fmt = destinationPixelFormat;
// Encoder flags: collect PSNR statistics and allow non-spec-compliant speed-ups
_pCodecContext->flags |= FFmpeg.AV_CODEC_FLAG_PSNR;
_pCodecContext->flags2 |= FFmpeg.AV_CODEC_FLAG2_FAST;
_pCodecContext->max_b_frames = 0;
FFmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "veryfast", 0);
FFmpeg.av_opt_set(_pCodecContext->priv_data, "tune", "zerolatency", 0);
// Open the encoder
FFmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError();
_pConvertContext = FFmpeg.sws_getContext(_srcFrameSize.Width, _srcFrameSize.Height, originPixelFormat, _detFrameSize.Width, _detFrameSize.Height, destinationPixelFormat,
FFmpeg.SWS_BICUBIC, null, null, null);
if (_pConvertContext == null)
throw new ApplicationException("Could not initialize the conversion context.");
var convertedFrameBufferSize = FFmpeg.av_image_get_buffer_size(destinationPixelFormat, _detFrameSize.Width, _detFrameSize.Height, 1);
_convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
_dstData = new BytePtr4();
_dstLineSize = new Int4();
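// Map the single contiguous buffer onto the YUV420P plane pointers and line sizes.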
FFmpeg.av_image_fill_arrays(ref _dstData, ref _dstLineSize, (byte*)_convertedFrameBufferPtr, destinationPixelFormat, _detFrameSize.Width, _detFrameSize.Height, 1);
_isCodecRunning = true;
}
/// <summary>
/// Release resources
/// </summary>
public void Dispose()
{
if (!_isCodecRunning) return;
_isCodecRunning = false;
// Close and free the encoder context
FFmpeg.avcodec_close(_pCodecContext);
FFmpeg.av_free(_pCodecContext);
// Free the converter (buffer and sws context)
Marshal.FreeHGlobal(_convertedFrameBufferPtr);
FFmpeg.sws_freeContext(_pConvertContext);
}
private AVFrame waitToYuvFrame;
/// <summary>
/// Encode one frame
/// </summary>
/// <param name="frameBytes">Raw RGB24/BGRA bytes of one frame</param>
/// <returns>The encoded packet bytes</returns>
public byte[] EncodeFrames(byte[] frameBytes)
{
if (!_isCodecRunning)
{
throw new InvalidOperationException("编码器未运行!");
}
// Row pitch: the number of bytes one image row occupies in memory
var rowPitch = _isRgb ? _srcFrameSize.Width * 3 : _srcFrameSize.Width * 4;
fixed (byte* pBitmapData = frameBytes)
{
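// Build the AVFrame in managed memory and point it at the pinned input buffer.
// This works on Windows and Android x86_64/armeabi-v7a but breaks on arm64-v8a (see below).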
waitToYuvFrame = new AVFrame
{
data = new BytePtr8 { [0] = pBitmapData },
linesize = new Int8 { [0] = rowPitch },
height = _srcFrameSize.Height
};
var rgbToYuv = ConvertToYuv(waitToYuvFrame, _detFrameSize);
byte[] buffer;
var pPacket = FFmpeg.av_packet_alloc();
try
{
int error;
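// Send the frame and poll for a packet; EAGAIN means the encoder needs more input first.
// With tune=zerolatency and max_b_frames = 0 it normally emits one packet per frame.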
do
{
FFmpeg.avcodec_send_frame(_pCodecContext, &rgbToYuv).ThrowExceptionIfError();
error = FFmpeg.avcodec_receive_packet(_pCodecContext, pPacket);
} while (error == FFmpeg.AVERROR(FFmpeg.EAGAIN));
error.ThrowExceptionIfError();
buffer = new byte[pPacket->size];
Marshal.Copy(new IntPtr(pPacket->data), buffer, 0, pPacket->size);
}
finally
{
FFmpeg.av_frame_unref(&rgbToYuv);
FFmpeg.av_packet_unref(pPacket);
}
return buffer;
}
}
/// <summary>
/// Convert to YUV format
/// </summary>
/// <param name="waitConvertYuvFrame">Frame waiting to be converted</param>
/// <param name="detSize">Target size after scaling</param>
/// <returns>The scaled YUV420P frame</returns>
private AVFrame ConvertToYuv(AVFrame waitConvertYuvFrame, Size detSize)
{
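// sws_scale converts the source RGB/BGRA data into the YUV420P planes of the
// pre-allocated conversion buffer, scaling to the destination size.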
FFmpeg.sws_scale(_pConvertContext, waitConvertYuvFrame.data, waitConvertYuvFrame.linesize, 0, waitConvertYuvFrame.height, _dstData, _dstLineSize);
var data = new BytePtr8();
data.UpdateFrom(_dstData);
var lineSize = new Int8();
lineSize.UpdateFrom(_dstLineSize);
IntPtr address = (IntPtr)(&waitConvertYuvFrame);
Debug.WriteLine("Address: 0x" + address.ToString("X"));
Debug.WriteLine("Size: " + sizeof(AVFrame));
FFmpeg.av_frame_unref(&waitConvertYuvFrame);
return new AVFrame
{
data = data,
linesize = lineSize,
width = detSize.Width,
height = detSize.Height
};
}
// Encoder
private AVCodec* _pCodec;
private AVCodecContext* _pCodecContext;
// Conversion buffer
private IntPtr _convertedFrameBufferPtr;
private BytePtr4 _dstData;
private Int4 _dstLineSize;
// Format conversion context
private SwsContext* _pConvertContext;
// Source frame size
private Size _srcFrameSize;
// Destination frame size
private Size _detFrameSize;
// True = three-channel RGB24 input; false = four-channel BGRA
private readonly bool _isRgb;
// Whether the encoder is running
private bool _isCodecRunning;
}
}
The encoder above works correctly on Windows and on the Android x86_64 and armeabi-v7a architectures.
On arm64-v8a, however, the app crashes outright as soon as the address-of operator & is applied to one of these stack-allocated frames, for example:
FFmpeg.av_frame_unref(&waitConvertYuvFrame);
FFmpeg.avcodec_send_frame(_pCodecContext, &rgbToYuv)
Turning on FFmpeg's logging pointed to a memory segmentation fault.
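How the logging was enabled is not shown here; a minimal sketch, assuming the wrapper mirrors FFmpeg's standard libavutil logging API (av_log_set_level and the AV_LOG_DEBUG constant):
// Hypothetical: raise FFmpeg's native log verbosity so the failure context shows up in logcat.
FFmpeg.av_log_set_level(FFmpeg.AV_LOG_DEBUG);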
Once the exception was understood it could be handled directly: on arm64-v8a the frame memory has to be allocated explicitly, which makes the fix simple. The key change:
AVFrame* pFrame = FFmpeg.av_frame_alloc(); // Compatible with 64-bit ARM (arm64-v8a)
pFrame->data = new BytePtr8 {[0] = pBitmapData};
pFrame->linesize = new Int8 { [0] = rowPitch };
pFrame->height = _srcFrameSize.Height;
In other words, explicitly allocated unmanaged memory replaces the managed allocation. The complete revised code:
using System;
using System.Drawing;
using System.Runtime.InteropServices;
namespace FFmpegAnalyzer
{
/// <summary>
/// Encoder
/// </summary>
internal unsafe class FFmpegEncoder
{
/// <param name="srcFrameSize">编码前一帧原始数据的大小</param>
/// <param name="isRgb">rgb数据</param>
/// <param name="detFrameSize">编码后一帧目标数据的大小</param>
public FFmpegEncoder(Size srcFrameSize, bool isRgb, Size detFrameSize)
{
_srcFrameSize = srcFrameSize;
_isRgb = isRgb;
_detFrameSize = detFrameSize == default ? _srcFrameSize : detFrameSize;
_detFrameSize.Width = (_detFrameSize.Width % 2 == 0) ? _detFrameSize.Width : _detFrameSize.Width - 1;
_detFrameSize.Height = (_detFrameSize.Height % 2 == 0) ? _detFrameSize.Height : _detFrameSize.Height - 1;
}
/// <summary>
/// Create the encoder
/// </summary>
public void CreateEncoder(AVCodecID codecFormat)
{
var originPixelFormat = _isRgb ? AVPixelFormat.AV_PIX_FMT_RGB24 : AVPixelFormat.AV_PIX_FMT_BGRA;
var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
_pCodec = FFmpeg.avcodec_find_encoder(codecFormat);
if (_pCodec == null)
throw new InvalidOperationException("Codec not found.");
_pCodecContext = FFmpeg.avcodec_alloc_context3(_pCodec);
_pCodecContext->width = _detFrameSize.Width;
_pCodecContext->height = _detFrameSize.Height;
_pCodecContext->framerate = new AVRational { num = 30, den = 1 };
_pCodecContext->time_base = new AVRational {num = 1, den = 30};
_pCodecContext->gop_size = 30;
_pCodecContext->pix_fmt = destinationPixelFormat;
// Encoder flags: collect PSNR statistics and allow non-spec-compliant speed-ups
_pCodecContext->flags |= FFmpeg.AV_CODEC_FLAG_PSNR;
_pCodecContext->flags2 |= FFmpeg.AV_CODEC_FLAG2_FAST;
_pCodecContext->max_b_frames = 0;
FFmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "veryfast", 0);
FFmpeg.av_opt_set(_pCodecContext->priv_data, "tune", "zerolatency", 0);
// Open the encoder
FFmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError();
_pConvertContext = FFmpeg.sws_getContext(_srcFrameSize.Width, _srcFrameSize.Height, originPixelFormat, _detFrameSize.Width, _detFrameSize.Height, destinationPixelFormat,
FFmpeg.SWS_BICUBIC, null, null, null);
if (_pConvertContext == null)
throw new ApplicationException("Could not initialize the conversion context.");
var convertedFrameBufferSize = FFmpeg.av_image_get_buffer_size(destinationPixelFormat, _detFrameSize.Width, _detFrameSize.Height, 1);
_convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
_dstData = new BytePtr4();
_dstLineSize = new Int4();
FFmpeg.av_image_fill_arrays(ref _dstData, ref _dstLineSize, (byte*)_convertedFrameBufferPtr, destinationPixelFormat, _detFrameSize.Width, _detFrameSize.Height, 1);
_isCodecRunning = true;
}
/// <summary>
/// Release resources
/// </summary>
public void Dispose()
{
if (!_isCodecRunning) return;
_isCodecRunning = false;
// Close and free the encoder context
FFmpeg.avcodec_close(_pCodecContext);
FFmpeg.av_free(_pCodecContext);
// Free the converter (buffer and sws context)
Marshal.FreeHGlobal(_convertedFrameBufferPtr);
FFmpeg.sws_freeContext(_pConvertContext);
}
/// <summary>
/// Encode one frame
/// </summary>
/// <param name="frameBytes">Raw RGB24/BGRA bytes of one frame</param>
/// <returns>The encoded packet bytes</returns>
public byte[] EncodeFrames(byte[] frameBytes)
{
if (!_isCodecRunning)
{
throw new InvalidOperationException("编码器未运行!");
}
// Row pitch: the number of bytes one image row occupies in memory
var rowPitch = _isRgb ? _srcFrameSize.Width * 3 : _srcFrameSize.Width * 4;
fixed (byte* pBitmapData = frameBytes)
{
AVFrame* pFrame = FFmpeg.av_frame_alloc(); // Allocate in unmanaged memory for arm64-v8a compatibility
pFrame->data = new BytePtr8 {[0] = pBitmapData};
pFrame->linesize = new Int8 { [0] = rowPitch };
pFrame->height = _srcFrameSize.Height;
var rgbToYuv = ConvertToYuv(pFrame, _detFrameSize);
byte[] buffer;
var pPacket = FFmpeg.av_packet_alloc();
try
{
int error;
do
{
FFmpeg.avcodec_send_frame(_pCodecContext, rgbToYuv).ThrowExceptionIfError();
error = FFmpeg.avcodec_receive_packet(_pCodecContext, pPacket);
} while (error == FFmpeg.AVERROR(FFmpeg.EAGAIN));
error.ThrowExceptionIfError();
buffer = new byte[pPacket->size];
Marshal.Copy(new IntPtr(pPacket->data), buffer, 0, pPacket->size);
}
finally
{
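// av_frame_unref releases the frame's data references but not the AVFrame struct itself;
// frames from av_frame_alloc should eventually be released with av_frame_free to avoid a leak.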
FFmpeg.av_frame_unref(rgbToYuv);
FFmpeg.av_packet_unref(pPacket);
}
return buffer;
}
}
/// <summary>
/// Convert to YUV format
/// </summary>
/// <param name="waitConvertYuvFrame">Frame waiting to be converted</param>
/// <param name="detSize">Target size after scaling</param>
/// <returns>The scaled YUV420P frame</returns>
private AVFrame* ConvertToYuv(AVFrame* waitConvertYuvFrame, Size detSize)
{
FFmpeg.sws_scale(_pConvertContext, waitConvertYuvFrame->data, waitConvertYuvFrame->linesize, 0, waitConvertYuvFrame->height, _dstData, _dstLineSize);
var data = new BytePtr8();
data.UpdateFrom(_dstData);
var lineSize = new Int8();
lineSize.UpdateFrom(_dstLineSize);
FFmpeg.av_frame_unref(waitConvertYuvFrame);
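// Allocate the output frame in unmanaged memory too, so its address can be taken safely on arm64-v8a.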
AVFrame* pFrame = FFmpeg.av_frame_alloc();
pFrame->data = data;
pFrame->linesize = lineSize;
pFrame->width = detSize.Width;
pFrame->height = detSize.Height;
pFrame->format = (int) AVPixelFormat.AV_PIX_FMT_YUV420P;
return pFrame;
}
// Encoder
private AVCodec* _pCodec;
private AVCodecContext* _pCodecContext;
// Conversion buffer
private IntPtr _convertedFrameBufferPtr;
private BytePtr4 _dstData;
private Int4 _dstLineSize;
// Format conversion context
private SwsContext* _pConvertContext;
// Source frame size
private Size _srcFrameSize;
// Destination frame size
private Size _detFrameSize;
// True = three-channel RGB24 input; false = four-channel BGRA
private readonly bool _isRgb;
// Whether the encoder is running
private bool _isCodecRunning;
}
}
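For completeness, here is a hypothetical usage sketch showing the optimization the title refers to: the encoder is opened once and then reused for every frame. The enum member AV_CODEC_ID_H264 and the frame sizes are illustrative assumptions, not part of the original code:
using System.Drawing;
// Hypothetical usage (AV_CODEC_ID_H264 and the sizes below are assumed for illustration):
var encoder = new FFmpegEncoder(new Size(1920, 1080), false, new Size(1280, 720));
encoder.CreateEncoder(AVCodecID.AV_CODEC_ID_H264); // open the encoder once
byte[] bgraFrame = new byte[1920 * 1080 * 4]; // one BGRA frame, filled by a capture source in practice
byte[] h264Packet = encoder.EncodeFrames(bgraFrame); // reuse the open encoder for each frame
// ... call EncodeFrames per captured frame, then release once when capture stops ...
encoder.Dispose();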