视频流通过广播进行传输
设备需要将视频转化为可以传输的视频流。视频流可以通过各自不同的协议进行传输:如果要在网页端播放,通常使用HTTP或WebSocket协议。本文只提供基于Qt将设备捕捉的画面转化为视频流、以UDP广播和WebSocket的形式传输到局域网中的大体思路。
注意:本文没有使用任何应用层流媒体协议(如RTSP/HLS),只是直接使用UDP广播发送视频帧,并通过WebSocket向网页端推送。
设备端.h文件
#ifndef DIALOG_H
#define DIALOG_H
namespace Ui {
class Dialog;
}
class Dialog : public QDialog
{
Q_OBJECT
public:
explicit Dialog(QWidget *parent = 0);
~Dialog();
private slots:
void processCapturedImage(int requestId, const QImage &img);
void captureImage(); // 添加这个槽函数声明
// void captureFrame();
// void sendFrame();
void socketDisconnected();
void onNewConnection() ;
void processMessage(const QString &message);
void sendVideoFrame(int requestId, const QImage &img);
void socketDisconnect();
private:
Ui::Dialog *ui;
QUdpSocket *udpSender;
QTimer *videoCaptureTimer; // 声明视频捕获定时器
QTimer *sendFrameTimer; // 声明视频发送定时器
QCamera *camera;
QCameraImageCapture *imageCapture;
QTimer *timer;
QWebSocketServer *server;
QList<QWebSocket *> m_clients;
QWebSocket *clientSocket;
};
#endif // DIALOG_H
设备端.cpp文件
#include "dialog.h"
#include "ui_dialog.h"
Dialog::Dialog(QWidget *parent) :
    QDialog(parent),
    ui(new Ui::Dialog)
{
    ui->setupUi(this);
    udpSender = new QUdpSocket(this);

    // Create the WebSocket server for web clients.
    server = new QWebSocketServer(QStringLiteral("Video Server"), QWebSocketServer::NonSecureMode, this);
    if (server->listen(QHostAddress::Any, 8888)) {
        qDebug() << "WebSocket server start listen on port 8888";
        // FIX: use the compile-time-checked pointer-to-member connect form,
        // consistent with the other connects in this file.
        connect(server, &QWebSocketServer::newConnection, this, &Dialog::onNewConnection);
    } else {
        // FIX: the original silently ignored a listen() failure.
        qWarning() << "WebSocket server failed to listen:" << server->errorString();
    }

    // Create and configure the camera for still-image capture.
    camera = new QCamera(this);
    imageCapture = new QCameraImageCapture(camera, this);
    camera->setCaptureMode(QCamera::CaptureStillImage);
    camera->start();

    // Every captured frame is shown locally and UDP-broadcast.
    connect(imageCapture, &QCameraImageCapture::imageCaptured, this, &Dialog::processCapturedImage);

    // Timer to capture frames periodically: 33 ms per frame ≈ 30 FPS.
    timer = new QTimer(this);
    connect(timer, &QTimer::timeout, this, &Dialog::captureImage);
    timer->start(33);
}
Dialog::~Dialog()
{
    // FIX: stop the capture pipeline before tearing it down; the original
    // deleted a still-running camera.
    camera->stop();
    delete ui;
    // imageCapture/camera are parented to `this`, but delete them explicitly
    // in dependency order (capture before camera) ahead of the QObject
    // parent-child cleanup.
    delete imageCapture;
    delete camera;
}
void Dialog::onNewConnection()
{
    QWebSocket *client = server->nextPendingConnection();
    connect(client, &QWebSocket::textMessageReceived, this, &Dialog::processMessage);
    // FIX: the original never connected `disconnected`, so dead sockets were
    // never removed from m_clients and sendVideoFrame() could dereference a
    // dangling pointer. (The commented-out version also targeted the wrong
    // receiver/slot: client has no socketDisconnect slot.)
    connect(client, &QWebSocket::disconnected, this, &Dialog::socketDisconnected);
    m_clients << client;
}
//处理获取的数据
void Dialog::processMessage(const QString &message)
{
clientSocket = qobject_cast<QWebSocket *>(sender());
if (clientSocket && message == "START_VIDEO") {
qDebug()<<message; // 开始发送视频帧给客户端
connect(imageCapture, &QCameraImageCapture::imageCaptured, this, &Dialog::sendVideoFrame); // 确保这里的信号和槽签名匹配 {
} else if (clientSocket && message == "STOP_VIDEO") {// 停止发送视频帧给客户端
disconnect(imageCapture, &QCameraImageCapture::imageCaptured, this, &Dialog::sendVideoFrame);
}
}
//发送frame
void Dialog::sendVideoFrame(int requestId, const QImage &img)
{
QByteArray byteArray;
QBuffer buffer(&byteArray);
img.save(&buffer, "JPEG");
if (clientSocket) {
clientSocket->sendBinaryMessage(byteArray);
clientSocket->flush();
// 不关闭连接
// clientSocket->close();
} else {
for (int i = 0; i < m_clients.length(); i++) {
m_clients[i]->sendBinaryMessage(byteArray);
m_clients[i]->flush();
// 不关闭连接
// m_clients[i]->close();
}
}
}
// Debug-only disconnect handler. NOTE(review): nothing in this file connects
// to this slot — client cleanup is done in socketDisconnected() instead; this
// appears to be a leftover and only logs.
void Dialog::socketDisconnect()
{
qDebug()<<"disconnect";
// m_clients.removeAll(client);
// client->deleteLater();
}
void Dialog::processCapturedImage(int requestId, const QImage &img)
{
Q_UNUSED(requestId);
//qDebug()<<"start show";
ui->V->setPixmap(QPixmap::fromImage(img));
QByteArray byteArray;
QBuffer buffer(&byteArray);
buffer.open(QIODevice::WriteOnly);
img.save(&buffer, "JPEG"); // 使用JPEG格式压缩图像
// 发送字节数组到UDP套接字
udpSender->writeDatagram(byteArray, QHostAddress::Broadcast, 8888); // 1234是接收端的端口号
}
// Timer slot: request one still-image capture. The result arrives
// asynchronously via QCameraImageCapture::imageCaptured.
void Dialog::captureImage()
{
imageCapture->capture();
}
void Dialog::socketDisconnected()
{
clientSocket = qobject_cast<QWebSocket *>(sender());
if (clientSocket) {
m_clients.removeAll(clientSocket);
clientSocket->deleteLater();
}
}
web端
<canvas id="videoCanvas" width="800" height="600"></canvas>
<script>
window.onload = function () {
    const canvas = document.getElementById('videoCanvas');
    if (!canvas) {
        console.error('Canvas element not found');
        return;
    }
    const ctx = canvas.getContext('2d');
    const socket = new WebSocket('ws://192.168.31.49:8888');
    socket.binaryType = 'arraybuffer';
    socket.onopen = () => {
        console.log('WebSocket connection opened');
        socket.send('START_VIDEO');
    };
    // Each message is one JPEG frame; draw it onto the canvas.
    socket.onmessage = (event) => {
        const blob = new Blob([event.data], { type: 'image/jpeg' });
        const img = new Image();
        const url = URL.createObjectURL(blob);
        img.onload = function () {
            ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
            // FIX: revoke the object URL once drawn — the original never
            // revoked, leaking one blob URL per frame (~30/s).
            URL.revokeObjectURL(url);
        };
        img.onerror = function () {
            // Revoke on decode failure too, so a bad frame doesn't leak.
            URL.revokeObjectURL(url);
        };
        img.src = url;
    };
    socket.onclose = () => {
        console.log('WebSocket connection closed');
    };
    socket.onerror = (error) => {
        console.error('WebSocket error:', error);
    };
};
</script>