Implementing Screen Recording on H.323

This post records my experience implementing screen recording and application sharing in the h323 framework using FFmpeg's gdigrab input device.
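gdigrab is FFmpeg's GDI-based screen-capture input device on Windows: the input name "desktop" grabs the whole screen, and "title=<window title>" grabs a single window. Before wiring it into H.323, the capture itself can be sanity-checked from the command line with something like ffmpeg -f gdigrab -framerate 10 -i desktop out.mp4.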

The code is as follows.

CScreenInputDevice.h:

#ifndef _FFMPEG_SCREEN_INPUT_DEVICE_H_
#define _FFMPEG_SCREEN_INPUT_DEVICE_H_


#ifdef P_USE_PRAGMA
#pragma interface
#endif


#include <ptlib.h>
#include <ptlib/videoio.h>
#include <ptclib/delaychan.h>


#ifdef __cplusplus
extern "C"
{
#endif
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavutil/imgutils.h"
#ifdef __cplusplus
}
#endif


///
//
// This class defines a video capture (input) device that grabs the screen
// through FFmpeg's gdigrab input device.
//


void ffmpeg_screen_capture_init();


class PVideoInputDevice_FFMPEGScreen : public PVideoInputDevice
{
    PCLASSINFO(PVideoInputDevice_FFMPEGScreen, PVideoInputDevice);
  public:
    /** Create a new ffmpeg/gdigrab screen capture input device.
    */
    PVideoInputDevice_FFMPEGScreen();

    /** Destroy the video input device.
    */
    ~PVideoInputDevice_FFMPEGScreen();

    /**Open the device given the device name.
    */
    BOOL Open(
      const PString & deviceName,   /// Device name to open
      BOOL startImmediate = true    /// Immediately start device
    );

    /**Determine if the device is currently open.
    */
    BOOL IsOpen();

    /**Close the device.
    */
    BOOL Close();

    /**Start the video device I/O.
    */
    BOOL Start();

    /**Stop the video device I/O capture.
    */
    BOOL Stop();

    /**Determine if the video device I/O capture is in progress.
    */
    BOOL IsCapturing();

    /**Get a list of all of the devices available.
    */
    static PStringList GetInputDeviceNames();

    virtual PStringList GetDeviceNames() const
      { return GetInputDeviceNames(); }

    /**Retrieve a list of device capabilities.
    */
    static bool GetDeviceCapabilities(
      const PString & /*deviceName*/, ///< Name of device
      Capabilities * /*caps*/         ///< List of supported capabilities
    ) { return false; }

    /**Get the maximum frame size in bytes.

       Note a particular device may be able to provide variable length
       frames (eg motion JPEG), so this is the maximum size of all frames.
    */
    virtual PINDEX GetMaxFrameBytes();

    /**Grab a frame.

       There will be a delay in returning, as specified by the frame rate.
    */
    virtual BOOL GetFrameData(
      BYTE * buffer,                 /// Buffer to receive frame
      PINDEX * bytesReturned = NULL  /// Optional bytes returned.
    );

    /**Grab a frame.

       Do not delay according to the current frame rate.
    */
    virtual BOOL GetFrameDataNoDelay(
      BYTE * buffer,                 /// Buffer to receive frame
      PINDEX * bytesReturned = NULL  /// Optional bytes returned.
    );

    /**Set the video format to be used.

       Default behaviour sets the value of the videoFormat variable and then
       returns the IsOpen() status.
    */
    virtual BOOL SetVideoFormat(
      VideoFormat videoFormat   /// New video format
    );

    /**Get the number of video channels available on the device.

       Default behaviour returns 1.
    */
    virtual int GetNumChannels();

    /**Set the video channel to be used on the device.
    */
    virtual BOOL SetChannel(
      int channelNumber  /// New channel number for device.
    );

    /**Set the colour format to be used.

       Default behaviour sets the value of the colourFormat variable and then
       returns the IsOpen() status.
    */
    virtual BOOL SetColourFormat(
      const PString & colourFormat   // New colour format for device.
    );

    /**Set the video frame rate to be used on the device.

       Default behaviour sets the value of the frameRate variable and then
       returns the IsOpen() status.
    */
    virtual BOOL SetFrameRate(
      unsigned rate  /// Frames per second
    );

    /**Get the minimum & maximum size of a frame on the device.

       Default behaviour returns the value 1 to UINT_MAX for both and returns
       false.
    */
    virtual BOOL GetFrameSizeLimits(
      unsigned & minWidth,   /// Variable to receive minimum width
      unsigned & minHeight,  /// Variable to receive minimum height
      unsigned & maxWidth,   /// Variable to receive maximum width
      unsigned & maxHeight   /// Variable to receive maximum height
    );

    /**Set the frame size to be used.

       Default behaviour sets the frameWidth and frameHeight variables and
       returns the IsOpen() status.
    */
    virtual BOOL SetFrameSize(
      unsigned width,   /// New width of frame
      unsigned height   /// New height of frame
    );

    void ClearMapping() { }

  protected:
    PString            m_device;
    AVInputFormat     *m_iformat;        // gdigrab input format descriptor
    AVFormatContext   *m_ic;             // demuxer context for the screen grab
    AVCodecContext    *m_codecCtx;       // rawvideo decoder context
    AVCodec           *m_Codec;
    struct SwsContext *img_convert_ctx;  // BGR32 -> YUV420P conversion context
    AVFrame           *m_frame, *m_frameYUV;

    BOOL           m_opened;
    unsigned       m_ffmpegFrameWidth;
    unsigned       m_ffmpegFrameHeight;
    unsigned       m_ffmpegFrameSize;
    PINDEX         m_videoFrameSize;
    unsigned       grabCount;
    PAdaptiveDelay pacing;
};






#endif // _FFMPEG_SCREEN_INPUT_DEVICE_H_




// End Of File ///


CScreenInputDevice.cpp:

#include <ptlib.h>

#include <ptlib/vconvert.h>
#include <ptlib/pfactory.h>
#include <ptlib/pluginmgr.h>
#include <ptlib/videoio.h>


#include "CScreenInputDevice.h"


#pragma comment(lib, "avcodec.lib")  
#pragma comment(lib, "avformat.lib")  
#pragma comment(lib, "avutil.lib")  
#pragma comment(lib, "avdevice.lib")  
#pragma comment(lib, "avfilter.lib")  
#pragma comment(lib, "swscale.lib") 


#define new PNEW


static const char DefaultDeviceName[] = "desktop";

// Call this once at program start-up, before any capture device is opened;
// avdevice_register_all() is what registers the gdigrab input.
void ffmpeg_screen_capture_init()
{
    av_register_all();
    avformat_network_init();
    avdevice_register_all();
}


///
// PVideoInputDevice_FFMPEGScreen


class PVideoInputDevice_FFMPEGScreen_PluginServiceDescriptor : public PDevicePluginServiceDescriptor
{
  public:
    virtual PObject * CreateInstance(int /*userData*/) const
    {
        return new PVideoInputDevice_FFMPEGScreen;
    }
    virtual PStringList GetDeviceNames(int /*userData*/) const
    {
        return PVideoInputDevice_FFMPEGScreen::GetInputDeviceNames();
    }
    virtual bool ValidateDeviceName(const PString & deviceName, int /*userData*/) const
    {
        return deviceName == DefaultDeviceName;
    }
} PVideoInputDevice_FFMPEGScreen_descriptor;

PCREATE_PLUGIN(FFMPEGScreen, PVideoInputDevice, &PVideoInputDevice_FFMPEGScreen_descriptor);




PVideoInputDevice_FFMPEGScreen::PVideoInputDevice_FFMPEGScreen() : m_opened(FALSE)
{
    SetColourFormat("YUV420P");
    channelNumber = 0;
    grabCount = 0;
    deviceName = DefaultDeviceName;

    m_frame = m_frameYUV = NULL;
    m_codecCtx = NULL;
    m_Codec = NULL;
    img_convert_ctx = NULL;
    m_ic = NULL;
    m_iformat = NULL;

    PTRACE(9, "colour format: " << colourFormat);

    SetFrameRate(10);
}

PVideoInputDevice_FFMPEGScreen::~PVideoInputDevice_FFMPEGScreen()
{
    Close();
}




BOOL PVideoInputDevice_FFMPEGScreen::Open(const PString & _deviceName, BOOL /*startImmediate*/)
{
    Close();

    m_device = _deviceName;

    // gdigrab is FFmpeg's Windows screen/window capture input device
    m_iformat = av_find_input_format("gdigrab");
    if (!m_iformat)
        return FALSE;

    m_frame    = av_frame_alloc();
    m_frameYUV = av_frame_alloc();

    m_opened = TRUE;
    return TRUE;
}

BOOL PVideoInputDevice_FFMPEGScreen::IsOpen()
{
    return m_opened;
}

BOOL PVideoInputDevice_FFMPEGScreen::Close()
{
    m_opened = FALSE;

    m_iformat = NULL;   // owned by FFmpeg, never freed here

    if (m_frame)
    {
        av_frame_free(&m_frame);
        av_frame_free(&m_frameYUV);
        m_frame = m_frameYUV = NULL;
    }
    if (m_codecCtx)
    {
        avcodec_close(m_codecCtx);
        m_codecCtx = NULL;   // owned by the stream, only closed here
    }
    if (m_ic)
    {
        avformat_close_input(&m_ic);
        m_ic = NULL;
    }
    if (img_convert_ctx)
    {
        sws_freeContext(img_convert_ctx);
        img_convert_ctx = NULL;
    }
    return TRUE;
}




BOOL PVideoInputDevice_FFMPEGScreen::Start()
{
    return TRUE;
}

BOOL PVideoInputDevice_FFMPEGScreen::Stop()
{
    return TRUE;
}

BOOL PVideoInputDevice_FFMPEGScreen::IsCapturing()
{
    return IsOpen();
}

PStringList PVideoInputDevice_FFMPEGScreen::GetInputDeviceNames()
{
    PStringList list;
    list.AppendString(DefaultDeviceName);
    return list;
}




BOOL PVideoInputDevice_FFMPEGScreen::SetVideoFormat(VideoFormat newFormat)
{
    return PVideoDevice::SetVideoFormat(newFormat);
}

int PVideoInputDevice_FFMPEGScreen::GetNumChannels()
{
    return 1;
}

BOOL PVideoInputDevice_FFMPEGScreen::SetChannel(int newChannel)
{
    return PVideoDevice::SetChannel(newChannel);
}

BOOL PVideoInputDevice_FFMPEGScreen::SetColourFormat(const PString & newFormat)
{
    // Only YUV420P is delivered (the swscale conversion below produces it),
    // so reject every other format.
    if (!(newFormat *= "YUV420P"))
        return FALSE;

    return PVideoDevice::SetColourFormat(newFormat);
}

BOOL PVideoInputDevice_FFMPEGScreen::SetFrameRate(unsigned rate)
{
    if (rate < 1 || rate > 30)
        return FALSE;
    return PVideoDevice::SetFrameRate(rate);
}




BOOL PVideoInputDevice_FFMPEGScreen::GetFrameSizeLimits(unsigned & minWidth,
                                                        unsigned & minHeight,
                                                        unsigned & maxWidth,
                                                        unsigned & maxHeight)
{
    // Only the currently configured grab size is offered.
    minWidth  = maxWidth  = m_ffmpegFrameWidth;
    minHeight = maxHeight = m_ffmpegFrameHeight;
    GetMaxFrameBytesConverted(m_ffmpegFrameSize);
    return TRUE;
}

BOOL PVideoInputDevice_FFMPEGScreen::SetFrameSize(unsigned width, unsigned height)
{
    // Could refuse to change the size while the device is open:
    //if (IsOpen())
    //    return FALSE;

    m_ffmpegFrameWidth  = width;
    m_ffmpegFrameHeight = height;
    // Sized for BGR32 so the buffer is large enough for the raw grab as
    // well as the converted YUV420P frame.
    m_ffmpegFrameSize = CalculateFrameBytes(m_ffmpegFrameWidth, m_ffmpegFrameHeight, "BGR32");
    return PVideoInputDevice::SetFrameSize(width, height);
}

PINDEX PVideoInputDevice_FFMPEGScreen::GetMaxFrameBytes()
{
    return GetMaxFrameBytesConverted(m_ffmpegFrameSize);
}




BOOL PVideoInputDevice_FFMPEGScreen::GetFrameData(BYTE * buffer, PINDEX * bytesReturned)
{
    pacing.Delay(1000 / frameRate);
    return GetFrameDataNoDelay(buffer, bytesReturned);
}

BOOL PVideoInputDevice_FFMPEGScreen::GetFrameDataNoDelay(BYTE * destFrame, PINDEX * bytesReturned)
{
    //static unsigned char * yuvBuff = NULL;
    //static unsigned int num_bytes = 0;
    grabCount++;

    // The gdigrab input is opened lazily on the first grab, once the frame
    // size and rate have been configured.
    if (!m_ic)
    {
        char optionStr[64];
        AVDictionary * format_opts = NULL;

        sprintf(optionStr, "%u", GetFrameRate());
        av_dict_set(&format_opts, "framerate", optionStr, 0);
        sprintf(optionStr, "%ux%u", m_ffmpegFrameWidth, m_ffmpegFrameHeight);
        av_dict_set(&format_opts, "video_size", optionStr, 0);

        /* open the input device with the generic avformat function */
        m_ic = avformat_alloc_context();
        //m_ic->flags |= AVFMT_FLAG_KEEP_SIDE_DATA;
        //m_ic->video_codec_id    = AV_CODEC_ID_RAWVIDEO;
        //m_ic->audio_codec_id    = AV_CODEC_ID_NONE;
        //m_ic->subtitle_codec_id = AV_CODEC_ID_NONE;
        int ret = avformat_open_input(&m_ic, deviceName, m_iformat, &format_opts);
        av_dict_free(&format_opts);
        if (ret < 0)
        {
            // avformat_open_input() frees the context and NULLs m_ic on failure
            PTRACE(2, "ScreenInputDevice can not open " << deviceName);
            return FALSE;
        }

        ret = avformat_find_stream_info(m_ic, NULL);
        if (ret < 0)
        {
            PTRACE(2, "ScreenInputDevice found no stream info for " << deviceName);
            avformat_close_input(&m_ic);
            return FALSE;
        }

        int videoindex = -1;
        for (unsigned i = 0; i < m_ic->nb_streams; i++)
        {
            if (m_ic->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
            {
                videoindex = i;
                break;
            }
        }
        if (videoindex == -1)
        {
            PTRACE(2, "Didn't find a video stream.");
            avformat_close_input(&m_ic);
            return FALSE;
        }

        m_codecCtx = m_ic->streams[videoindex]->codec;
        m_Codec = avcodec_find_decoder(m_codecCtx->codec_id); // AV_CODEC_ID_RAWVIDEO for gdigrab
        if (m_Codec == NULL)
        {
            PTRACE(2, "Codec not found.");
            avformat_close_input(&m_ic);
            return FALSE;
        }

        if (avcodec_open2(m_codecCtx, m_Codec, NULL) < 0)
        {
            PTRACE(2, "Could not open codec.");
            avformat_close_input(&m_ic);
            return FALSE;
        }

        // swscale context converting the grabbed BGR32 frame to YUV420P
        img_convert_ctx = sws_getContext(m_codecCtx->width, m_codecCtx->height, m_codecCtx->pix_fmt,
                                         m_codecCtx->width, m_codecCtx->height, AV_PIX_FMT_YUV420P,
                                         SWS_BICUBIC, NULL, NULL, NULL);

        //output via a separate buffer (not used; conversion goes straight
        //into the caller's buffer below):
        //num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, m_codecCtx->width,
        //                                     m_codecCtx->height, 1);
        //yuvBuff = new unsigned char[num_bytes];
    }

    BYTE * readBuffer = destFrame;
    AVPacket pkt;
    int got_picture = 0;

    if (av_read_frame(m_ic, &pkt) == 0)
    {
        avcodec_decode_video2(m_codecCtx, m_frame, &got_picture, &pkt);

        if (!got_picture)
        {
            av_free_packet(&pkt);
            PTRACE(4, "ScreenInputDevice decoded no picture.");
            return FALSE;
        }

        // Map the YUV420P plane pointers of m_frameYUV into the caller's
        // buffer, then convert directly into it.
        av_image_fill_arrays(m_frameYUV->data, m_frameYUV->linesize, readBuffer,
                             AV_PIX_FMT_YUV420P, m_codecCtx->width, m_codecCtx->height, 1);

        sws_scale(img_convert_ctx, (const uint8_t * const *)m_frame->data, m_frame->linesize,
                  0, m_codecCtx->height, m_frameYUV->data, m_frameYUV->linesize);

        if (bytesReturned != NULL)
            *bytesReturned = m_ffmpegFrameSize;
        av_free_packet(&pkt);
        return TRUE;
    }

    return FALSE;

    // FFmpeg's own colour conversion is used above, so the PTLib converter
    // is not needed here.
    //if (converter == NULL) {
    //    if (bytesReturned != NULL)
    //        *bytesReturned = m_ffmpegFrameSize;
    //} else {
    //    converter->SetSrcFrameSize(m_ffmpegFrameWidth, m_ffmpegFrameHeight);
    //    if (!converter->Convert(readBuffer, destFrame, bytesReturned))
    //        return false;
    //    if (bytesReturned != NULL)
    //        *bytesReturned = converter->GetMaxDstFrameBytes();
    //}
    //return true;
}


What kept failing during debugging was the copy-out of the data after the YUV420P conversion, which used the following statement:

memcpy(readBuffer, m_frameYUV->data, m_ffmpegFrameSize);

It should actually be:

memcpy(readBuffer, m_frameYUV->data[0], m_ffmpegFrameSize);
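
The reason is that AVFrame::data is a table of plane pointers, not the pixel data itself. A minimal sketch (the names w, h and buf are illustrative, not from the code above) of what av_image_fill_arrays() sets up for YUV420P:

int w = 1280, h = 720;   // illustrative frame size
uint8_t * buf = (uint8_t *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, w, h, 1));
AVFrame * f = av_frame_alloc();
av_image_fill_arrays(f->data, f->linesize, buf, AV_PIX_FMT_YUV420P, w, h, 1);
// With align = 1 the three planes sit back to back in buf:
// f->data[0] == buf                  -- Y plane, w*h bytes
// f->data[1] == buf + w*h            -- U plane, w*h/4 bytes
// f->data[2] == buf + w*h + w*h/4    -- V plane, w*h/4 bytes
// Copying from data copies the pointer table; copying from data[0] copies
// the actual contiguous frame.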


After av_image_fill_arrays() the YUV plane start addresses are automatically mapped into the contiguous address space of readBuffer. Doing without FFmpeg's own YUV conversion also works: in that case set the screen-grab pixel format to BGR32, i.e. make the first statement of the constructor SetColourFormat("BGR32"), and change the implementation of SetColourFormat() so that only BGR32 can be set and every other format returns false. The h323 framework will then automatically create a converter to perform the colour-space conversion. For application sharing, pass title=<window title> as the device name to Open(). PTLib actually ships application-sharing code of its own, implemented in ptlib\src\ptlib\msos\vidinput_app.cxx; building it requires the P_APPSHARE macro to be enabled.
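
For reference, a sketch of the alternative SetColourFormat() described above (my reconstruction of what the paragraph describes, not code from the original project):

BOOL PVideoInputDevice_FFMPEGScreen::SetColourFormat(const PString & newFormat)
{
    // Accept only the native gdigrab pixel format; the h323 framework then
    // creates a PColourConverter (BGR32 -> YUV420P) by itself.
    if (!(newFormat *= "BGR32"))
        return FALSE;
    return PVideoDevice::SetColourFormat(newFormat);
}

In that variant the sws_getContext()/sws_scale() calls and av_image_fill_arrays() are dropped from GetFrameDataNoDelay(), which instead hands the raw BGR32 frame to the attached converter.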


The OpenVideoChannel code in the h323 endpoint looks roughly like this:

BOOL CEndPoint::OpenVideoChannel(H323Connection & connection,
                                 BOOL isEncoding,
                                 H323VideoCodec & codec)
{
    PVideoChannel * channel = new PVideoChannel;
    PVideoInputDevice * grabber = NULL;

    if (isEncoding)
    {
        // Transmitter part: the video input device.
        // width, height and localFlip come from the endpoint's own
        // configuration.
        grabber = new PVideoInputDevice_FFMPEGScreen();

        if (!grabber->Open("desktop", FALSE) ||
            !grabber->SetColourFormatConverter("YUV420P") ||
            !grabber->SetFrameSize(width, height) ||
            !grabber->SetVFlipState(localFlip))
        {
            // open failed
            return false;
        }

        grabber->SetFrameRate(10);
        grabber->Start();
        channel->AttachVideoReader(grabber);
    }
    else
    {
        // Receiver part: the video output (decoder) side.
    }

    return codec.AttachChannel(channel, TRUE);
}
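
To share a single application window instead of the whole desktop, only the device name passed to Open() changes; the window title below is a hypothetical example:

grabber->Open("title=Untitled - Notepad", FALSE);   // hypothetical window title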
