FFmpeg实现捕获桌面摄像头和音频

获取设备名

//ffmpeg 版本5.1 低版本 获取dshow设备名可能未实现
void find_video_device(){
        avdevice_register_all();
        const AVInputFormat * ifmt=av_find_input_format("dshow");
        AVDeviceInfoList* list; //设备描述列表
        int devices_num=avdevice_list_input_sources(ifmt,"dshow",NULL,&list);
        String cur_video,cur_audio;
        for(int i=0;i<devices_num;i++){
            if(*(list->devices[i]->media_types)==(AVMediaType::AVMEDIA_TYPE_VIDEO)){ 
            cur_video=list->devices[i]->device_description;
            }else if(*(list->devices[i]->media_types)==(AVMediaType::AVMEDIA_TYPE_AUDIO)){
            cur_audio=list->devices[i]->device_description;
            }
        }

        avdevice_free_list_devices(&list);
}

打开设备以及获取信息进行编解码 (dshow)

// Open a dshow video capture device by friendly name `s` and set up a
// decoder for its first video stream.
void  open_v_device_decode(String s){
        // Build the dshow URL "video=<device name>".
        // FIX: the original wrote into `char video_name[6+s.size()]="video=";`
        // — a VLA with an initializer is not valid C++, and the buffer had no
        // room for (and never received) a terminating '\0', so
        // avformat_open_input read past the end of the array.
        const std::string video_name = "video=" + s.toStdString();
        AVDictionary* options = NULL;
        // camera capture options
        av_dict_set(&options,"fps","25",0);
        /*audio capture options
         *av_dict_set(&options, "sample_size", "16", 0);
         *av_dict_set(&options, "channels", "2", 0);
         *av_dict_set(&options, "sample_rate", "44100", 0);
         *av_dict_set(&options, "audio_buffer_size", "40", 0);
         *for real-time audio this should be small — it is the capture buffer
         *(milliseconds per delivery); the default is 500 ms
        */
        int ret=avformat_open_input(&pFormatCtx,video_name.c_str(),ifmt,&options);
        av_dict_free(&options); // FIX: unconsumed dict entries were leaked
        if(ret!=0){
            qWarning()<< ret << "Couldn't open input stream.";
            return ;
        }
        ret=avformat_find_stream_info(pFormatCtx,NULL);
        if(ret<0)
        {
            qWarning()<<"Couldn't find stream information."<<ret;
            avformat_close_input(&pFormatCtx); // FIX: input leaked on this path
            return ;
        }
        // Locate the first video stream.
        int videoindex=-1;
        for(uint i=0; i<pFormatCtx->nb_streams; i++){
            if(pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO)
            {
                videoindex=i;
                break;
            }
        }
        if(videoindex==-1)
        {
            qWarning()<<"Couldn't find a video stream.";
            avformat_close_input(&pFormatCtx); // FIX: input leaked on this path
            return ;
        }
        // video decoder
        const AVCodec* pCodec=avcodec_find_decoder(pFormatCtx->streams[videoindex]->codecpar->codec_id);
        if(!pCodec){ // FIX: NULL codec crashed avcodec_alloc_context3 usage below
            qWarning()<<"Couldn't find a decoder for the video stream.";
            avformat_close_input(&pFormatCtx);
            return ;
        }
        AVCodecContext* pCodecCtx=avcodec_alloc_context3(pCodec); // decoding context
        // copy codec parameters from the input stream into the context
        if ((ret = avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoindex]->codecpar)) < 0){
            avcodec_free_context(&pCodecCtx);  // FIX: context leaked on this path
            avformat_close_input(&pFormatCtx);
            return ;
        }
        pCodecCtx->thread_count=8; // number of decoding threads
        // open the decoder
        ret = avcodec_open2(pCodecCtx,pCodec,nullptr);
        if(ret < 0){
            qDebug() << QStringLiteral("打开解码器失败");
            avcodec_free_context(&pCodecCtx);  // FIX: context leaked on this path
            avformat_close_input(&pFormatCtx);
            return;
        }
        // NOTE(review): on success pCodecCtx is local and neither stored nor
        // freed — the decode snippet below uses it, so it should presumably be
        // kept in a member/global; confirm against the rest of the project.
}
//获取其原始数据 一般获取到的为yuv格式图像和PCM数据 
//若要实时传输需要进一步重采样和编码
/*      AVPacket *pkt=(AVPacket *)av_malloc(sizeof(AVPacket));
        AVFrame	*pFrameYUYV=av_frame_alloc();
        if(av_read_frame(pFormatCtx,pkt)<0){
            av_packet_free(&pkt);
            av_frame_free(&pFrameYUYV);
            //quit
        }
        ret = avcodec_send_packet(pCodecCtx, pkt);
        if (ret != 0){
             qDebug() << QStringLiteral("receive pkt from video is failed");
             //quit
          }
         ret =avcodec_receive_frame(pCodecCtx, pFrameYUYV);
         if (ret != 0){
             qDebug() << QStringLiteral("receive pAVFrame is failed");
             av_frame_unref(pFrameYUYV);
            //quit
        }
        */

进行录屏获取图像信息(gdibrab)

    // Screen capture via the gdigrab input device (Windows desktop grabbing).
    // NOTE(review): fragment of a larger function — pFormatCtx comes from the
    // enclosing scope, and the `return;` statements belong to it.
    avdevice_register_all();
    // gdigrab options
    AVDictionary* options = NULL;
    pFormatCtx = avformat_alloc_context();
     // frame rate
     av_dict_set(&options,"framerate","25",0);
    // pixel offset of the capture area from the screen border
     av_dict_set(&options,"offset_x","20",0);
     av_dict_set(&options,"offset_y","40",0);
    const AVInputFormat *ifmt=av_find_input_format("gdigrab");
    if(avformat_open_input(&pFormatCtx,"desktop",ifmt,&options)!=0){
        qWarning() << "Couldn't open input stream(screen record).";
        return ;
    }
    if(avformat_find_stream_info(pFormatCtx,NULL)<0)
    {
        qWarning()<<"Couldn't find stream information.";
        return ;
    }
    // From here on, stream lookup and decoding are the same as the dshow path above.
  • 1
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
要使用Qt和FFmpeg实现网络摄像头视频录制,您需要做以下几个步骤: 1. 安装FFmpeg库。您可以从官方网站下载预编译的二进制文件,或者自己编译源代码。 2. 在Qt项目中导入FFmpeg库。您可以使用Qt的插件系统或手动链接库文件。 3. 创建一个Qt界面,用于显示摄像头捕捉到的视频流。 4. 使用FFmpeg库编写代码,将摄像头捕捉到的视频流转换为所需的格式并保存到本地文件。 以下是一个简单的示例代码,演示如何使用Qt和FFmpeg实现网络摄像头视频录制: ```cpp #include <QApplication> #include <QMainWindow> #include <QVBoxLayout> #include <QLabel> #include <QTimer> #include <QDebug> #include <QDateTime> #include <QThread> extern "C" { #include "libavcodec/avcodec.h" #include "libavformat/avformat.h" #include "libswscale/swscale.h" } #define WIDTH 640 #define HEIGHT 480 #define FPS 25 class CameraWidget : public QWidget { public: CameraWidget(QWidget *parent = nullptr) : QWidget(parent) { label_ = new QLabel(this); QVBoxLayout *layout = new QVBoxLayout(this); layout->addWidget(label_); setLayout(layout); } virtual ~CameraWidget() {} void start() { timer_ = new QTimer(this); connect(timer_, SIGNAL(timeout()), this, SLOT(update())); timer_->start(1000/FPS); } void stop() { timer_->stop(); delete timer_; timer_ = nullptr; } protected: void update() { // Capture camera frame QImage image(WIDTH, HEIGHT, QImage::Format_RGB888); for (int y = 0; y < HEIGHT; y++) { for (int x = 0; x < WIDTH; x++) { QRgb color = qRgb(qrand() % 256, qrand() % 256, qrand() % 256); image.setPixel(x, y, color); } } // Display camera frame label_->setPixmap(QPixmap::fromImage(image)); // Save camera frame to file if (recording_) { if (!formatContext_) { initFormatContext(); } if (formatContext_ && videoStream_) { AVPacket packet; av_init_packet(&packet); packet.data = nullptr; packet.size = 0; if (av_new_packet(&packet, WIDTH * HEIGHT * 3) < 0) { qWarning() << "Failed to allocate packet"; } SwsContext *swsContext = sws_getContext(WIDTH, HEIGHT, AV_PIX_FMT_RGB24, WIDTH, HEIGHT, videoStream_->codecpar->format, SWS_BILINEAR, nullptr, nullptr, nullptr); if (swsContext) { uint8_t *srcData[4] = {(uint8_t *)image.bits(), nullptr, nullptr, nullptr}; int srcLinesize[4] = {image.bytesPerLine(), 0, 0, 0}; uint8_t *dstData[4] = 
{nullptr, nullptr, nullptr, nullptr}; int dstLinesize[4] = {0, 0, 0, 0}; av_image_alloc(dstData, dstLinesize, videoStream_->codecpar->width, videoStream_->codecpar->height, videoStream_->codecpar->format, 1); if (dstData[0] && av_image_fill_arrays(videoFrame_->data, videoFrame_->linesize, dstData[0], videoStream_->codecpar->format, videoStream_->codecpar->width, videoStream_->codecpar->height, 1) >= 0) { sws_scale(swsContext, srcData, srcLinesize, 0, HEIGHT, videoFrame_->data, videoFrame_->linesize); videoFrame_->pts = pts_++; if (avcodec_send_frame(videoCodecContext_, videoFrame_) == 0) { while (avcodec_receive_packet(videoCodecContext_, &packet) == 0) { packet.stream_index = videoStream_->index; packet.pts = av_rescale_q(packet.pts, videoCodecContext_->time_base, videoStream_->time_base); packet.dts = av_rescale_q(packet.dts, videoCodecContext_->time_base, videoStream_->time_base); packet.duration = av_rescale_q(packet.duration, videoCodecContext_->time_base, videoStream_->time_base); packet.pos = -1; if (av_interleaved_write_frame(formatContext_, &packet) < 0) { qWarning() << "Failed to write packet"; } av_packet_unref(&packet); } } } av_freep(&dstData[0]); sws_freeContext(swsContext); } av_packet_unref(&packet); } } } void initFormatContext() { QString filename = QDateTime::currentDateTime().toString("yyyy-MM-dd_hh-mm-ss"); filename = QDir::tempPath() + "/" + filename + ".avi"; if (avformat_alloc_output_context2(&formatContext_, nullptr, "avi", filename.toUtf8().constData()) < 0) { qWarning() << "Failed to allocate output format context"; return; } if (avio_open(&formatContext_->pb, filename.toUtf8().constData(), AVIO_FLAG_WRITE) < 0) { qWarning() << "Failed to open output file"; avformat_free_context(formatContext_); formatContext_ = nullptr; return; } videoStream_ = avformat_new_stream(formatContext_, nullptr); if (!videoStream_) { qWarning() << "Failed to create video stream"; avformat_free_context(formatContext_); formatContext_ = nullptr; return; } 
videoCodecContext_ = avcodec_alloc_context3(nullptr); if (!videoCodecContext_) { qWarning() << "Failed to allocate codec context"; avformat_free_context(formatContext_); formatContext_ = nullptr; return; } videoCodecContext_->codec_id = AV_CODEC_ID_H264; videoCodecContext_->codec_type = AVMEDIA_TYPE_VIDEO; videoCodecContext_->width = WIDTH; videoCodecContext_->height = HEIGHT; videoCodecContext_->pix_fmt = AV_PIX_FMT_YUV420P; videoCodecContext_->time_base = av_make_q(1, FPS); videoStream_->codecpar->codec_id = AV_CODEC_ID_H264; videoStream_->codecpar->codec_type = AVMEDIA_TYPE_VIDEO; videoStream_->codecpar->width = WIDTH; videoStream_->codecpar->height = HEIGHT; videoStream_->codecpar->format = videoCodecContext_->pix_fmt; videoStream_->time_base = av_make_q(1, FPS); if (avcodec_open2(videoCodecContext_, avcodec_find_encoder(videoCodecContext_->codec_id), nullptr) < 0) { qWarning() << "Failed to open codec"; avformat_free_context(formatContext_); formatContext_ = nullptr; return; } if (avformat_write_header(formatContext_, nullptr) < 0) { qWarning() << "Failed to write header"; avformat_free_context(formatContext_); formatContext_ = nullptr; return; } videoFrame_ = av_frame_alloc(); if (!videoFrame_) { qWarning() << "Failed to allocate frame"; avformat_free_context(formatContext_); formatContext_ = nullptr; return; } videoFrame_->format = videoCodecContext_->pix_fmt; videoFrame_->width = WIDTH; videoFrame_->height = HEIGHT; if (av_frame_get_buffer(videoFrame_, 32) < 0) { qWarning() << "Failed to allocate frame buffer"; avformat_free_context(formatContext_); formatContext_ = nullptr; return; } pts_ = 0; } private: QLabel *label_; QTimer *timer_ = nullptr; bool recording_ = false; AVFormatContext *formatContext_ = nullptr; AVStream *videoStream_ = nullptr; AVCodecContext *videoCodecContext_ = nullptr; AVFrame *videoFrame_ = nullptr; int64_t pts_ = 0; }; int main(int argc, char *argv[]) { QApplication a(argc, argv); QMainWindow mainWindow; CameraWidget 
cameraWidget(&mainWindow); mainWindow.setCentralWidget(&cameraWidget); cameraWidget.start(); QThread::sleep(10); cameraWidget.stop(); return a.exec(); } ``` 该示例代码使用Qt中的QTimer类定期捕获摄像头的视频流,并将其显示在界面上。如果设置recording_为true,则使用FFmpeg库将当前帧转换为指定格式并写入文件。请注意,此示例代码仅是演示如何使用Qt和FFmpeg实现网络摄像头视频录制的示例,您需要根据您的实际需求进行修改和调整。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值