前面我使用video=screen-capture-recorder实现了录制的相关功能,这一节来讨论下我踩的坑。
1.screen-capture-recorder只能录制主屏幕,如果电脑连接了扩展屏,想要录制扩展屏,只能将扩展屏设置成主屏才能录制。
2.screen-capture-recorder无法设置录制的起始位置和大小,只能录制整个屏幕,想要录制某个区域只能先录制整个屏幕再使用ffmpeg命令来裁剪,这个跟组合录制一样。
启动一个进程来完成即可。
3.注意设备的类型,如果是dshow一定要设置dshow,gdigrab类型的要设置gdigrab,设备的具体类型可以使用 ffmpeg -list_devices true -f dshow -i dummy 来显示,前面会显示
这个设备的具体类型
4.想要直接录制扩展屏可以使用desktop + gdigrab来实现,该方式可以指定录制区域且可以支持扩展屏,设置参数一定要靠前如下:
if(m_videoName.contains("desktop"))//desktop capture via gdigrab
{
m_videoDirverName = "gdigrab";
args << "-f" << m_videoDirverName;
// gdigrab INPUT options must appear BEFORE "-i desktop"; ffmpeg does not
// ignore unknown options, so placing them after the inputs makes it treat
// them as output options and abort.
args << "-offset_x" << QString::number(m_recordArea.left());
args << "-offset_y" << QString::number(m_recordArea.top());
args << "-video_size" << QString::number(m_recordArea.width()) + "x" + QString::number(m_recordArea.height());
args << "-draw_mouse" << "1"; // include the cursor (gdigrab input option)
args << "-i" << m_videoName;
args << "-f" << m_audioDirverName;
args << "-i" << m_audioName;
// Output (encoding) options follow all "-i" inputs.
args << "-pix_fmt" << "yuv420p";
args << "-vcodec" << "libx264";
args << "-acodec" << "aac";
args << "-s" << QString::number(m_videoSize.width()) + "x" + QString::number(m_videoSize.height()); // scale output; before "-i" it would override -video_size
args << "-r" << QString::number(m_nFps);
args << "-q" << "10";
args << "-ar" << "44100";
args << "-ac" << "2";
args << "-tune" << "zerolatency";
args << "-preset" << "ultrafast";
args << "-f" << "mp4";
}
上面的参数不详解了,ffmpeg文档都有,不知道的看一下ffmpeg相关书籍就知道了,主要是一些解码器,采样率,编码器,帧率等设置。
5.如果录制的方式不支持某种参数设置一定不要设置,会失败,ffmpeg并不会抛弃参数。
例如使用video=screen-capture-recorder录屏,video=Integrated Webcam录制摄像头,title=Qt Creator录制窗口均不能设置起始位置和大小,设置就会录制失败。
6.录制屏幕会把屏幕窗口中的所有过程全部录制。
归类:
video=screen-capture-recorder 录屏,不支持区域录制,只能全屏录制再裁剪且不支持扩展屏,设备为dshow；video=Integrated Webcam 录制摄像头,设备为dshow,不能设置区域录制相关参数；title=Qt Creator 录制窗口,设备为dshow,其他与录制摄像头类似；desktop 录屏,设备为gdigrab,可以实现区域录制,支持扩展屏。
// Build the ffmpeg argument list for the current capture source.
// The source is selected by m_videoName: "video=..." (DirectShow camera or
// virtual screen device), "desktop" (gdigrab full/partial desktop), or
// "title=..." (gdigrab single window). Returns an empty list for an
// unrecognized source. Side effect: sets m_videoDirverName.
QStringList FFmpegCommandRecord::commandParama()
{
QStringList args;
// Output/encoding options shared by every capture mode. They must be
// appended AFTER all "-i" inputs: ffmpeg does not discard options a device
// does not understand, it fails (see note 5 in the article).
const auto appendEncodeArgs = [this](QStringList &a) {
a << "-pix_fmt" << "yuv420p";   // widely compatible pixel format
a << "-vcodec" << "libx264";    // H.264 video
a << "-acodec" << "aac";        // AAC audio
a << "-s" << QString::number(m_videoSize.width()) + "x" + QString::number(m_videoSize.height()); // scale output
a << "-r" << QString::number(m_nFps); // frame rate
a << "-q" << "10";              // quality
a << "-ar" << "44100";          // audio sample rate
a << "-ac" << "2";              // stereo
a << "-tune" << "zerolatency";
a << "-preset" << "ultrafast";
a << "-f" << "mp4";
};
if(m_videoName.contains("video="))// camera or virtual screen device (DirectShow)
{
m_videoDirverName = "dshow";
args << "-f" << m_videoDirverName;
args << "-i" << m_videoName;
args << "-f" << m_audioDirverName;
args << "-i" << m_audioName;
// NOTE: dshow has no "-draw_mouse" option; passing it makes ffmpeg fail,
// so it is intentionally omitted here.
appendEncodeArgs(args);
}
else if(m_videoName.contains("desktop"))// desktop capture (gdigrab)
{
m_videoDirverName = "gdigrab";
args << "-f" << m_videoDirverName;
// gdigrab INPUT options must precede "-i desktop".
args << "-offset_x" << QString::number(m_recordArea.left());
args << "-offset_y" << QString::number(m_recordArea.top());
args << "-video_size" << QString::number(m_recordArea.width()) + "x" + QString::number(m_recordArea.height());
args << "-draw_mouse" << "1"; // include the cursor in the capture
args << "-i" << m_videoName;
args << "-f" << m_audioDirverName;
args << "-i" << m_audioName;
appendEncodeArgs(args);
}
else if(m_videoName.contains("title="))// single-window capture (gdigrab)
{
m_videoDirverName = "gdigrab";
args << "-f" << m_videoDirverName;
args << "-draw_mouse" << "1"; // input option: must come before "-i"
args << "-i" << m_videoName;
args << "-f" << m_audioDirverName;
args << "-i" << m_audioName;
appendEncodeArgs(args);
}
else
{
// Unknown source: record nothing, but keep the driver default.
m_videoDirverName = "dshow";
}
return args;
}
最后奉上demo的代码
// Recorder that drives a command-line ffmpeg process through QProcess.
// Segments produced by pause/stop are listed in a concat-demuxer file and
// merged into a single video by a second ffmpeg run when recording stops.
class FFmpegCommandRecord : public Record
{
Q_OBJECT
public:
explicit FFmpegCommandRecord(QObject *parent = nullptr);
virtual ~FFmpegCommandRecord() override;
virtual void startRecord() override;   // spawn ffmpeg with commandParama()
virtual void stopRecord() override;    // finalize current segment, trigger merge
virtual void pauseRecord() override;   // finalize current segment, keep session open
virtual void restoreRecord()override{} // intentionally empty -- TODO confirm resume path
private:
QStringList commandParama();           // build the ffmpeg argument list per source type
void clear();                          // delete temporary segment files
private:
QProcess *m_pRecordProcess{nullptr};   // the running ffmpeg capture process
QFile* m_pConcatFile{nullptr};         // concat-demuxer list file used for merging
QString m_cacheDir;                    // directory holding temporary segments
QString m_concatFileName;              // path of the concat list, captured at stop
QStringList m_currentRecordTempFiles;  // [0]=concat list, then per-segment tmp/mp4 paths
};
//CPP
#include "ffmpegcommandrecord.h"
#include <QProcess>
#include <QDateTime>
#include <QTextStream>
#include <QDebug>
#include <QApplication>
#include <QFile>
#include <QDir>
#include <QTextCodec>
// Prepare the cache directory that will hold temporary recording segments.
FFmpegCommandRecord::FFmpegCommandRecord(QObject *parent) : Record(parent)
{
m_cacheDir = QApplication::applicationDirPath() + "/FFmpegCommandCache";
// QDir::mkpath() succeeds (as a no-op) when the directory already exists,
// so no separate existence check is needed.
QDir().mkpath(m_cacheDir);
}
// Remove any leftover temporary segment files on destruction.
// NOTE(review): does not stop a still-running ffmpeg process -- confirm
// callers always stopRecord() first.
FFmpegCommandRecord::~FFmpegCommandRecord()
{
clear();
}
// Start (or resume) recording: spawns an ffmpeg capture process whose output
// is a new temporary segment, and registers that segment in the concat list
// file so all segments can be merged when recording stops.
void FFmpegCommandRecord::startRecord()
{
// A start after a stop begins a brand-new session: drop old temp files.
if(m_bStopRecord)
{
clear();
}
if(m_pRecordProcess == nullptr)
{
m_pRecordProcess = new QProcess(this);
// Recording finished -> merge the recorded segments into one video.
connect(m_pRecordProcess, static_cast<void(QProcess::*)(int, QProcess::ExitStatus)>(&QProcess::finished),
[this](int exitCode, QProcess::ExitStatus exitStatus) {
// A pause also ends the process; only merge after an explicit stop.
if(!m_bStopRecord)
return;
QProcess* mergeProcess = new QProcess(this);
connect(mergeProcess, static_cast<void(QProcess::*)(int, QProcess::ExitStatus)>(&QProcess::finished),
[mergeProcess, this](int exitCode, QProcess::ExitStatus exitStatus) {
qDebug() << QStringLiteral("成功合成视频");
qDebug() << mergeProcess->readAllStandardError();
// Index 2 is the first segment's ".mp4" path, reused as merge output.
if(m_currentRecordTempFiles.count()>3)
m_currentVideoPath = m_currentRecordTempFiles.at(2);
clear();
});
// Need at least the concat list plus one segment to merge.
if(m_currentRecordTempFiles.count()<3)
return ;
QStringList args;
// "-f concat -safe 0": merge the segments listed in the concat file
// (index 0) by stream copy, writing to the ".mp4" path at index 2.
args << "-f" << "concat";
args << "-safe" << "0";
args << "-i" << m_currentRecordTempFiles.at(0);
args << "-c" << "copy";
args << m_currentRecordTempFiles.at(2);
// Merge the video segments.
mergeProcess->start("ffmpeg", args);
});
}
QStringList args = commandParama();
// Lazily create the concat list file (first entry of the temp-file list).
if(m_pConcatFile == nullptr)
{
QString path = QString("%1/%2_tmp").arg(m_cacheDir).arg(QDateTime::currentDateTime().toMSecsSinceEpoch());
m_pConcatFile = new QFile(path);
if (!m_pConcatFile->open(QIODevice::ReadWrite | QIODevice::Text|QIODevice::Truncate))
qDebug() << "File open error";
m_currentRecordTempFiles<<path;
}
// Each segment gets a unique timestamped name; the concat file references
// it by relative name ("file 'v<ts>_tmp'"), so paths are cache-dir relative.
QString tmpFilePath = QString("v%1_tmp").arg(QDateTime::currentDateTime().toMSecsSinceEpoch());
QTextStream out(m_pConcatFile);
out << "file" << " '" << tmpFilePath << "'\n";
QString tmpFileAbsolutePath = m_cacheDir + "/"+ tmpFilePath;
args << tmpFileAbsolutePath;
// Record both the "_tmp" capture path and its ".mp4" counterpart.
// NOTE: replace() mutates tmpFileAbsolutePath AFTER the previous append,
// so the list holds the "_tmp" path first, then the ".mp4" path.
m_currentRecordTempFiles<<tmpFileAbsolutePath;
m_currentRecordTempFiles<<tmpFileAbsolutePath.replace("_tmp", ".mp4");
m_pRecordProcess->start("ffmpeg", args);
m_bStopRecord = false;
}
// Build the ffmpeg argument list for the current capture source.
// The source is selected by m_videoName: "video=..." (DirectShow camera or
// virtual screen device), "desktop" (gdigrab full/partial desktop), or
// "title=..." (gdigrab single window). Returns an empty list for an
// unrecognized source. Side effect: sets m_videoDirverName.
QStringList FFmpegCommandRecord::commandParama()
{
QStringList args;
// Output/encoding options shared by every capture mode. They must be
// appended AFTER all "-i" inputs: ffmpeg does not discard options a device
// does not understand, it fails (see note 5 in the article).
const auto appendEncodeArgs = [this](QStringList &a) {
a << "-pix_fmt" << "yuv420p";   // widely compatible pixel format
a << "-vcodec" << "libx264";    // H.264 video
a << "-acodec" << "aac";        // AAC audio
a << "-s" << QString::number(m_videoSize.width()) + "x" + QString::number(m_videoSize.height()); // scale output
a << "-r" << QString::number(m_nFps); // frame rate
a << "-q" << "10";              // quality
a << "-ar" << "44100";          // audio sample rate
a << "-ac" << "2";              // stereo
a << "-tune" << "zerolatency";
a << "-preset" << "ultrafast";
a << "-f" << "mp4";
};
if(m_videoName.contains("video="))// camera or virtual screen device (DirectShow)
{
m_videoDirverName = "dshow";
args << "-f" << m_videoDirverName;
args << "-i" << m_videoName;
args << "-f" << m_audioDirverName;
args << "-i" << m_audioName;
// NOTE: dshow has no "-draw_mouse" option; passing it makes ffmpeg fail,
// so it is intentionally omitted here.
appendEncodeArgs(args);
}
else if(m_videoName.contains("desktop"))// desktop capture (gdigrab)
{
m_videoDirverName = "gdigrab";
args << "-f" << m_videoDirverName;
// gdigrab INPUT options must precede "-i desktop".
args << "-offset_x" << QString::number(m_recordArea.left());
args << "-offset_y" << QString::number(m_recordArea.top());
args << "-video_size" << QString::number(m_recordArea.width()) + "x" + QString::number(m_recordArea.height());
args << "-draw_mouse" << "1"; // include the cursor in the capture
args << "-i" << m_videoName;
args << "-f" << m_audioDirverName;
args << "-i" << m_audioName;
appendEncodeArgs(args);
}
else if(m_videoName.contains("title="))// single-window capture (gdigrab)
{
m_videoDirverName = "gdigrab";
args << "-f" << m_videoDirverName;
args << "-draw_mouse" << "1"; // input option: must come before "-i"
args << "-i" << m_videoName;
args << "-f" << m_audioDirverName;
args << "-i" << m_audioName;
appendEncodeArgs(args);
}
else
{
// Unknown source: record nothing, but keep the driver default.
m_videoDirverName = "dshow";
}
return args;
}
void FFmpegCommandRecord::stopRecord()
{
m_bStopRecord = true;
m_concatFileName = m_pConcatFile->fileName();
m_pConcatFile->close();
m_pConcatFile->deleteLater();
m_pConcatFile = nullptr;
m_pRecordProcess->write("q");
}
void FFmpegCommandRecord::pauseRecord()
{
m_bStopRecord = false;
m_pRecordProcess->write("q");
}
void FFmpegCommandRecord::clear()
{
//删除临时文件
foreach(QString str, m_currentRecordTempFiles)
{
if(str.contains("_tmp"))
{
QFile::remove(str);
}
}
m_currentVideoPath = "";
m_currentRecordTempFiles.clear();
}