Audio and Video Calling over a LAN

voicecallwidget.h

#ifndef VOICECALLWIDGET_H
#define VOICECALLWIDGET_H

#include <QWidget>
#include <QMainWindow>
#include <QUdpSocket>
#include <QAudioInput>
#include <QAudioOutput>
#include <QAudioDeviceInfo>
#include <QAudioFormat>
#include <QIODevice>
#include <QMessageBox>
#include <QString>
#include <QMap>
#include <QThread>
//#include <windows.h>
//#include <WinSock2.h>
#include <memory>
#include <iostream>
#include <QMutex>

//#pragma comment(lib, "Ws2_32.lib")

#include "lyUI/LyDialog.h"
#include "LyMessageBox.h"

extern "C"
{
#include "ffmpeg/libavcodec/avcodec.h"
#include "ffmpeg/libavformat/avformat.h"
#include "ffmpeg/libavutil/pixfmt.h"
#include "ffmpeg/libavutil/imgutils.h"
#include "ffmpeg/libswscale/swscale.h"
}

#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swresample.lib")
#pragma comment(lib, "swscale.lib")


#define SDL_MAIN_HANDLED
extern "C"
{
#include "ffmpeg/SDL2/SDL_config.h"
#include "ffmpeg/SDL2/SDL.h"
}

#ifdef _DEBUG
#pragma comment(lib, "SDL2d.lib")
#else
#pragma comment(lib, "SDL2.lib")
#endif

namespace Ui {
class VoiceCallWidget;
}

// Datagram payload exchanged on the call socket: either a text command such as
// "#VOICECALLREQUEST" / "#HANGUP", or up to 1024 bytes of raw PCM audio.
struct video {
	int lens = 0;		// number of valid bytes in data
	char data[1024];	// command string or raw PCM samples
};

class PktSendThread : public QThread
{
	Q_OBJECT
public:
	PktSendThread(QObject* parent = 0);
	~PktSendThread();

	void setIPAndPort(const QString& ip_, quint16 port_);

	void setRecieveFlag(bool f);

protected:
	void run();

private:
	QString m_strIP{ "" };
	quint16 m_uintPort = 0;
	QUdpSocket* m_pktSocket = Q_NULLPTR;

	bool recieveFlag = true;
	QMutex m_mutex;
};

class PktRecieveThread : public QThread
{
	Q_OBJECT
public:
	PktRecieveThread(QObject* parent = 0);
	~PktRecieveThread();

	void setIPAndPort(const QString& ip_, quint16 port_);

	bool Decode(const unsigned char *pData, int nLength);
	void InitCodec();

	void setRecieveFlag(bool f);

public slots:
	void slot_recievePic();

signals:
	void sig_pictureByte(const QImage& image);

protected:
	void run();

private:
	QString m_strIP{ "" };
	quint16 m_uintPort = 0;

	QUdpSocket* m_pktSocket = Q_NULLPTR;

	AVFrame					*m_pFrame;
	AVPacket				*m_pPacket;
	AVCodecContext			*m_pCodecCtx;
	AVCodecParserContext	*m_pCodecParserCtx;

	QByteArray m_dataVideoPic;
	bool recieveFlag = true;
	QMutex m_mutex;
};

class VoiceCallWidget : public LyDialog
{
    Q_OBJECT

public:
    explicit VoiceCallWidget(QWidget *parent = nullptr);
    ~VoiceCallWidget();

signals:
	void signal_hangUp();

public slots:
	void slot_sendAudioData();

	void slot_callRequest();

	void on_pbtnJoinMulticast_clicked();

	void on_pbtnExitMulticast_clicked();

protected:
	void paintEvent(QPaintEvent *event) override;

private slots:
	void on_pbtnSend_clicked();

	void onSocketReadyRead(); // read data arriving on the multicast socket

	void handleStateChanged(QAudio::State newState);

	void on_buttonVoiceCall_clicked();

	void on_buttonVideoCall_clicked();

	void on_buttonHangUp_clicked();

	void on_buttonOnorOffVideo_clicked();

private:
	void initAudioInput();

	void buttonIsAbled(bool callAble, bool hangUpAble, bool videoAble);

	void OnorOffVideo(bool enableVideo, bool isButtonClicked);

private:
	QUdpSocket *udpSocket;			// QUdpSocket used to talk to connected clients
	QHostAddress groupAddress;		// multicast group address

	QUdpSocket* m_callSocket = Q_NULLPTR;
	QAudioInput* m_audioInput = Q_NULLPTR; // captures audio
	QAudioOutput* m_audioOutput = Q_NULLPTR; // plays audio
	QIODevice *m_inputDevice = Q_NULLPTR;
	QIODevice* m_outputDevice = Q_NULLPTR;
	bool meboxFirstBuild = true;
	QString m_curAddress{ "" };
	quint16 m_curPort = 0;
	quint16 m_curVideoPort = 0;
	QHostAddress m_targetIP;
	quint16 m_targetPort = 0;

	PktSendThread* m_pktSendThread = Q_NULLPTR;
	PktRecieveThread* m_pktRecieveThread = Q_NULLPTR;

	QMap<QString, quint16> m_friIPPort;
	QMap<QString, quint16> m_friIPPortVideo;
	bool m_enableVideo = false;

	bool flag = false;

private:
    Ui::VoiceCallWidget *ui;
};

#endif // VOICECALLWIDGET_H

voicecallwidget.cpp

#include "voicecallwidget.h"
#include "ui_voicecallwidget.h"

#include <QString>
#include <QNetworkInterface>
#include <QNetworkDatagram>
#include <QMap>
#include <QRandomGenerator>
#include <QPainter>
#include <QStyleOption>
#include <QStyle>
#include <QDebug>

#include <opencv2/opencv.hpp>

// Encode one captured OpenCV frame to H.264: convert BGR to planar I420, point the
// AVFrame planes at the converted buffer, and run one send_frame / receive_packet
// cycle. Returns true when a complete packet has been written into pkt.
bool mat2Packet(cv::Mat& mat_frame, AVFrame* frame, AVCodecContext* codec_ctx, AVPacket*& pkt)
{

	static long long pts_ = 0;
	// convert the cv::Mat into an AVFrame
	cv::Mat yuv_frame;
	cv::cvtColor(mat_frame, yuv_frame, cv::COLOR_BGR2YUV_I420);
	/*memcpy(frame->data[0], yuv_frame.data, codec_ctx->width * codec_ctx->height);
	memcpy(frame->data[1], yuv_frame.data + codec_ctx->width * codec_ctx->height, codec_ctx->width * codec_ctx->height);
	memcpy(frame->data[2], yuv_frame.data + codec_ctx->width * codec_ctx->height * 5 / 4, codec_ctx->width * codec_ctx->height / 4);*/
	frame->data[0] = yuv_frame.data;
	frame->data[1] = yuv_frame.data + codec_ctx->width * codec_ctx->height;
	frame->data[2] = yuv_frame.data + codec_ctx->width * codec_ctx->height * 5 / 4;
	frame->pts = pts_++;

	// encode the frame and fetch the resulting packet
	int ret = avcodec_send_frame(codec_ctx, frame);
	while (ret >= 0)
	{
		ret = avcodec_receive_packet(codec_ctx, pkt);
		if (ret == 0)
			return true;
		else if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
			break;
		else if (ret < 0)
			return false;
	}
	return false;
}

PktSendThread::PktSendThread(QObject* parent /* = 0 */) : QThread(parent)
{
	m_pktSocket = new QUdpSocket(this);
}

PktSendThread::~PktSendThread()
{
	setRecieveFlag(false);
	quit();
	wait();
}

void PktSendThread::setIPAndPort(const QString& ip_, quint16 port_)
{
	m_strIP = ip_;
	m_uintPort = port_;
}

void PktSendThread::setRecieveFlag(bool f)
{
	QMutexLocker locker(&m_mutex);
	recieveFlag = f;
}

void PktSendThread::run()
{
	setRecieveFlag(true);

	AVCodecContext* codec_ctx = nullptr;
	AVCodec* codec = nullptr;
	AVFrame* frame = nullptr;
	AVPacket* pkt = nullptr;

	// initialize libavformat (all formats) and register every muxer/demuxer
	av_register_all();

	// initialize the FFmpeg network module
	//avformat_network_init();
	codec = avcodec_find_encoder(AV_CODEC_ID_H264);

	// create the AVCodecContext object
	codec_ctx = avcodec_alloc_context3(codec);
	codec_ctx->codec_id = AV_CODEC_ID_H264;
	codec_ctx->codec_type = AVMEDIA_TYPE_VIDEO;
	codec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
	codec_ctx->width = 1024;
	codec_ctx->height = 768;
	codec_ctx->delay = 1;
	codec_ctx->gop_size = 250;
	codec_ctx->max_b_frames = 0;
	codec_ctx->time_base = { 1,30 };
	codec_ctx->framerate = { 30, 1 };
	codec_ctx->bit_rate = 1000000;

	AVCodecParameters *pAVCodecParameters = nullptr;
	{
		pAVCodecParameters = avcodec_parameters_alloc();
		AVCodecParameters *par = pAVCodecParameters;
		par->codec_type = AVMediaType::AVMEDIA_TYPE_VIDEO;
		par->codec_id = AVCodecID::AV_CODEC_ID_H264;
		par->codec_tag = 27;

		par->bit_rate = 0; // 0   int64
		par->bits_per_coded_sample = 0; //   0   int
		par->bits_per_raw_sample = 8; //     8   int
		par->block_align = 0; //     0   int
		par->channel_layout = 0; //  0   unsigned int64
		par->channels = 0; //    0   int
		par->chroma_location = AVChromaLocation::AVCHROMA_LOC_LEFT; //    AVCHROMA_LOC_LEFT (1)   AVChromaLocation
		par->codec_id = AVCodecID::AV_CODEC_ID_H264; //    AV_CODEC_ID_H264 (28)   AVCodecID
		par->codec_tag = 0;//27; //  27  unsigned int
		par->codec_type = AVMediaType::AVMEDIA_TYPE_VIDEO; //  AVMEDIA_TYPE_VIDEO (0)  AVMediaType
		par->color_primaries = AVColorPrimaries::AVCOL_PRI_BT709;//AVColorPrimaries::AVCOL_PRI_BT709; //     AVCOL_PRI_BT709 (1) AVColorPrimaries
		par->color_range = AVColorRange::AVCOL_RANGE_JPEG;//AVColorRange::AVCOL_RANGE_MPEG; //    AVCOL_RANGE_MPEG (1)    AVColorRange
		par->color_space = AVColorSpace::AVCOL_SPC_BT709;//AVColorSpace::AVCOL_SPC_BT709; //    AVCOL_SPC_BT709 (1) AVColorSpace
		par->color_trc = AVColorTransferCharacteristic::AVCOL_TRC_BT709;//AVColorTransferCharacteristic::AVCOL_TRC_BT709; //   AVCOL_TRC_BT709 (1) AVColorTransferCharacteristic
		par->extradata = nullptr; //   ""  unsigned char*
		par->extradata_size = 0; //  78  int
		par->field_order = AVFieldOrder::AV_FIELD_PROGRESSIVE; //     AV_FIELD_PROGRESSIVE (1)    AVFieldOrder
		par->format = 12; // 0   int
		par->frame_size = 0; // 0   int
		par->height = 768;//2160; // 2160    int
		par->initial_padding = 0; // 0   int
		par->level = 42; // 51  int
		par->profile = 100; // 77  int
		par->sample_aspect_ratio.den = 0; //  @0x2562ab23a40  AVRational
		par->sample_aspect_ratio.den = 1; // 1   int
		par->sample_aspect_ratio.num = 0; // 0   int
		par->sample_rate = 0; //  0   int
		par->seek_preroll = 0; //  0   int
		par->trailing_padding = 0; // 0   int
		par->video_delay = 0; //  1   int
		par->width = 1024;//3840; //  3840int
	}
	{
		AVCodecContext *codec = codec_ctx;
		const AVCodecParameters *par = pAVCodecParameters;
		codec->codec_type = par->codec_type;
		codec->codec_id = par->codec_id;
		codec->codec_tag = par->codec_tag;

		codec->bit_rate = par->bit_rate;
		codec->bits_per_coded_sample = par->bits_per_coded_sample;
		codec->bits_per_raw_sample = par->bits_per_raw_sample;
		codec->profile = par->profile;
		codec->level = par->level;

		switch (par->codec_type) {
		case AVMEDIA_TYPE_VIDEO:
			codec->pix_fmt = (AVPixelFormat)par->format;
			codec->width = par->width;
			codec->height = par->height;
			codec->field_order = par->field_order;
			codec->color_range = par->color_range;
			codec->color_primaries = par->color_primaries;
			codec->color_trc = par->color_trc;
			codec->colorspace = par->color_space;
			codec->chroma_sample_location = par->chroma_location;
			codec->sample_aspect_ratio = par->sample_aspect_ratio;
			codec->has_b_frames = par->video_delay;
			break;
		case AVMEDIA_TYPE_AUDIO:
			codec->sample_fmt = (AVSampleFormat)par->format;
			codec->channel_layout = par->channel_layout;
			codec->channels = par->channels;
			codec->sample_rate = par->sample_rate;
			codec->block_align = par->block_align;
			codec->frame_size = par->frame_size;
			codec->delay =
				codec->initial_padding = par->initial_padding;
			codec->trailing_padding = par->trailing_padding;
			codec->seek_preroll = par->seek_preroll;
			break;
		case AVMEDIA_TYPE_SUBTITLE:
			codec->width = par->width;
			codec->height = par->height;
			break;
		}
	}

	codec_ctx->flags |= AV_CODEC_FLAG_LOW_DELAY;  // optional: low-delay flag
	codec_ctx->flags2 |= AV_CODEC_FLAG2_FAST;     // optional: allow speed-over-accuracy tricks
												  // p_codec_ctx->flags2       |= AV_CODEC_FLAG2_CHUNKS;       // optional: chunked bitstream flag
												  // p_codec_ctx->flags2       |= AV_CODEC_FLAG2_IGNORE_CROP;  // optional: ignore cropping information
												  // p_codec_ctx->thread_count  = 1;                           // optional: single-threaded decoding only

	avcodec_parameters_to_context(codec_ctx, pAVCodecParameters);

	AVDictionary*             opts = nullptr;
	//av_dict_set(&opts, "bufsize", "14745600000", 0);
	//av_dict_set(&opts, "buffer_size", "14745600000", 0);
	//av_dict_set(&opts, "max_delay", "400", 0);
	//av_dict_set(&opts, "stimeout", "50000000", 0); // disconnect timeout, 50000000
	//av_dict_set(&opts, "buffer_size", "2147480000", 0); // with the FFmpeg defaults a 1920x1080 RTSP stream shows heavy corruption; increasing "buffer_size" improves quality and reduces artifacts
	av_dict_set(&opts, "tune", "zerolatency", 0);
	av_dict_set(&opts, "preset", "fast", 0);
	av_dict_set(&opts, "profile", "high422", 0);
	// open the encoder
	avcodec_open2(codec_ctx, codec, &opts);

	// create the AVFrame object
	frame = av_frame_alloc();
	frame->format = codec_ctx->pix_fmt;
	frame->width = codec_ctx->width;
	frame->height = codec_ctx->height;
	av_image_alloc(frame->data, frame->linesize, codec_ctx->width, codec_ctx->height, codec_ctx->pix_fmt, 32);

	// create the AVPacket object
	//pkt = (AVPacket *)av_malloc(sizeof(AVPacket));
	pkt = av_packet_alloc();
	av_init_packet(pkt);

	std::vector<int > capIndex = { 0/*, 1, 2*/ };
	cv::VideoCapture* capture = new cv::VideoCapture(0);
	if (!capture->isOpened())	// new never returns nullptr; check that the camera actually opened
	{
		delete capture;
		return;
	}
	capture->set(cv::CAP_PROP_FRAME_WIDTH, 1024);
	capture->set(cv::CAP_PROP_FRAME_HEIGHT, 768);
	capture->set(cv::CAP_PROP_FPS, 30);

	int length = 0;
	while (true)
	{
		QMutexLocker locker(&m_mutex);
		if (!recieveFlag)
			break;

		//for (int i = 0; i < capIndex.size(); i++)
		{
			cv::Mat mat_frame;// = cv::Mat::zeros(1920, 1080, CV_8UC3);
			bool bre = capture->read(mat_frame);
			if(!bre) continue;
			//cv::imshow("frame", mat_frame);
			//cv::waitKey(30);
			/*capPtrs[i]->read(mat_frame);
			transMats[i]->sendPic(mat_frame);*/
			//std::cout << "222 "<< std::endl;
			//if ( 0)
			{
				static char szT[512];
				szT[0] = 0x47;
				szT[256] = 0x47;
				bool result = mat2Packet(mat_frame, frame, codec_ctx, pkt);
				int pkt_size = pkt->size;
				std::cout << "1111 " << result << pkt_size << std::endl;
				if (result && pkt_size > 0)
				{
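					// Split the encoded packet into 512-byte datagrams: a 0x47 sync byte at
					// offsets 0 and 256, each followed by up to 255 bytes of payload.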

					char* pT = (char*)pkt->data;
					while (pkt_size >= 510)
					{
						memcpy(szT + 1, pT, 255);
						memcpy(szT + 257, pT + 255, 255);
						length = m_pktSocket->writeDatagram(szT, 512, QHostAddress(m_strIP), m_uintPort);
						pT += 510;
						pkt_size -= 510;
					}
					if (pkt_size > 255)
					{
						memcpy(szT + 1, pT, pkt_size);
						length = m_pktSocket->writeDatagram(szT, 256, QHostAddress(m_strIP), m_uintPort);
						pT += 255;
						pkt_size -= 255;
					}
					if (pkt_size > 0)
					{
						memcpy(szT + 1, pT, pkt_size);
						memset(szT + 1 + pkt_size, 0, 255 - pkt_size);
						length = m_pktSocket->writeDatagram(szT, pkt_size + 1, QHostAddress(m_strIP), m_uintPort);
					}

					av_init_packet(pkt);
				}
				std::cout << "222 " << result << pkt_size << std::endl;
			}
		}
	}

	if (capture)
		delete capture;
}

PktRecieveThread::PktRecieveThread(QObject* parent /* = 0 */) : QThread(parent)
{
	m_pktSocket = new QUdpSocket(this);
}

PktRecieveThread::~PktRecieveThread()
{
	setRecieveFlag(false);
	quit();
	wait();
}

void PktRecieveThread::setIPAndPort(const QString& ip_, quint16 port_)
{
	m_strIP = ip_;
	m_uintPort = port_;
}

void PktRecieveThread::setRecieveFlag(bool f)
{
	QMutexLocker locker(&m_mutex);
	recieveFlag = f;
}

void PktRecieveThread::run()
{
	setRecieveFlag(true);

	if (!m_pktSocket)
		m_pktSocket = new QUdpSocket(this);

	m_pktSocket->bind(QHostAddress(m_strIP), m_uintPort, QUdpSocket::ShareAddress | QUdpSocket::ReuseAddressHint);

	InitCodec();
	while (true)
	{
		slot_recievePic();
		QMutexLocker locker(&m_mutex);
		if (!recieveFlag)
			break;

		QThread::usleep(1);
	}
	//connect(m_pktSocket, &QUdpSocket::readyRead, this, &PktRecieveThread::slot_recievePic);
	av_parser_close(m_pCodecParserCtx);
	av_frame_free(&m_pFrame);
	avcodec_close(m_pCodecCtx);
	av_free(m_pCodecCtx);
	//QThread::run();
}

void PktRecieveThread::InitCodec()
{
	avcodec_register_all();
	AVCodecID codec_id = AV_CODEC_ID_H264;
	const AVCodec *pCodec = avcodec_find_decoder(codec_id);
	if (!pCodec)
	{
		printf("Codec not found\n");
	}
	m_pCodecCtx = avcodec_alloc_context3(pCodec);
	if (!m_pCodecCtx)
	{
		printf("Could not allocate video codec context\n");
	}
	m_pCodecParserCtx = av_parser_init(codec_id);
	if (!m_pCodecParserCtx)
	{
		printf("Could not allocate video parser context\n");
	}
	if (avcodec_open2(m_pCodecCtx, pCodec, NULL) < 0)
	{
		printf("Could not open codec\n");
	}
	m_pFrame = av_frame_alloc();
	m_pPacket = new AVPacket;
	av_init_packet(m_pPacket);
}

bool PktRecieveThread::Decode(const unsigned char *pData, int nLength)
{
	bool bReturn = false;
	int i, cur_size = nLength;
	const unsigned char *cur_ptr = pData;
	while (cur_size > 0)
	{
		int len = av_parser_parse2(m_pCodecParserCtx, m_pCodecCtx, &m_pPacket->data, &m_pPacket->size, cur_ptr, cur_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
		cur_ptr += len;
		cur_size -= len;
		if (m_pPacket->size == 0)
			continue;

		int got_picture;
		int ret = avcodec_decode_video2(m_pCodecCtx, m_pFrame, &got_picture, m_pPacket);
		if (ret >= 0 && got_picture)
		{
			// convert the decoded AVFrame to a QImage
			SwsContext* img_convert_ctx = sws_getContext(m_pCodecCtx->width, m_pCodecCtx->height, m_pCodecCtx->pix_fmt, m_pCodecCtx->width, m_pCodecCtx->height, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
			AVFrame* frameRGB = av_frame_alloc();
			avpicture_alloc((AVPicture*)frameRGB, AV_PIX_FMT_RGB24, m_pCodecCtx->width, m_pCodecCtx->height);
			sws_scale(img_convert_ctx, m_pFrame->data, m_pFrame->linesize, 0, m_pCodecCtx->height, frameRGB->data, frameRGB->linesize);

			// QImage only wraps the RGB buffer, so emit a deep copy (the signal crosses threads),
			// then release the buffer and conversion context instead of leaking them every frame
			QImage image(frameRGB->data[0], m_pCodecCtx->width, m_pCodecCtx->height, frameRGB->linesize[0], QImage::Format_RGB888);
			emit sig_pictureByte(image.copy());

			avpicture_free((AVPicture*)frameRGB);
			av_frame_free(&frameRGB);
			sws_freeContext(img_convert_ctx);

			bReturn = true;
		}
		av_free_packet(m_pPacket);
	}
	return bReturn;
}


void PktRecieveThread::slot_recievePic()
{
	while (m_pktSocket->hasPendingDatagrams())
	{
		QByteArray ba;
		ba.resize(m_pktSocket->pendingDatagramSize());
		m_pktSocket->readDatagram(ba.data(), ba.size());
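		// Each datagram holds one or two 256-byte chunks (a 0x47 sync byte followed by
		// 255 payload bytes); skip the sync bytes and feed the payload to the H.264 parser.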
		const unsigned char *p = (const unsigned char *)ba.data() + 1;
		int nS = (ba.size() >> 8);
		while (nS > 0)
		{
			Decode(p, 255);
			p += 256;
			nS--;
		}
	}
}

VoiceCallWidget::VoiceCallWidget(QWidget *parent) :
	LyDialog(tr2("音视频通话"), parent),
    ui(new Ui::VoiceCallWidget)
{
    ui->setupUi(centralWidget());
	this->setFixedSize(600, 500);

	udpSocket = new QUdpSocket(this); // QUdpSocket used to talk to connected clients
									  // Multicast routing scope: 1 keeps traffic inside the local LAN.
									  // The multicast TTL (time to live) is decremented by 1 at each router hop;
									  // most routers do not forward multicast, so the default of 1 confines
									  // datagrams to the local subnet.
	udpSocket->setSocketOption(QAbstractSocket::MulticastTtlOption, 1);
	connect(udpSocket, &QUdpSocket::readyRead, this, &VoiceCallWidget::onSocketReadyRead);

	m_callSocket = new QUdpSocket(this);
	connect(m_callSocket, &QUdpSocket::readyRead, this, &VoiceCallWidget::slot_callRequest);

	ui->cbxMultiIP->addItem("239.1.1.1");
	ui->lePort->setText("1001");

	// hide controls that are not needed for this use case
	ui->label->hide();
	ui->cbxMultiIP->hide();
	ui->label_2->hide();
	ui->lePort->hide();
	ui->pbtnExitMulticast->hide();
	ui->pbtnJoinMulticast->hide();
	ui->leSendData->hide();
	ui->pbtnSend->hide();

	ui->labelVideo->setHidden(true);
	ui->plainTextEdit->setHidden(false);

	// voice call: set up audio capture/playback
	initAudioInput();
	buttonIsAbled(false, false, false);

	connect(ui->pbtnJoinMulticast, &QPushButton::clicked, this, &VoiceCallWidget::on_pbtnJoinMulticast_clicked);
	connect(ui->pbtnExitMulticast, &QPushButton::clicked, this, &VoiceCallWidget::on_pbtnExitMulticast_clicked);
	connect(ui->pbtnSend, &QPushButton::clicked, this, &VoiceCallWidget::on_pbtnSend_clicked);
	connect(ui->buttonVoiceCall, &QPushButton::clicked, this, &VoiceCallWidget::on_buttonVoiceCall_clicked);
	connect(ui->buttonVideoCall, &QPushButton::clicked, this, &VoiceCallWidget::on_buttonVideoCall_clicked);
	connect(ui->buttonHangUp, &QPushButton::clicked, this, &VoiceCallWidget::on_buttonHangUp_clicked);
	connect(ui->buttonOnorOffVideo, &QPushButton::clicked, this, &VoiceCallWidget::on_buttonOnorOffVideo_clicked);
}

VoiceCallWidget::~VoiceCallWidget()
{
	if (udpSocket)
		udpSocket->close();
	if (m_callSocket)
		m_callSocket->close();
	if (m_inputDevice)
		m_inputDevice->close();
	if (m_outputDevice)
		m_outputDevice->close();
	if (m_audioInput)
		m_audioInput->stop();
	if (m_audioOutput)
		m_audioOutput->stop();
	
	delete ui;
}

void VoiceCallWidget::paintEvent(QPaintEvent *event)
{
	QPainter painter(this);
	QStyleOption opt;
	opt.init(this);
	style()->drawPrimitive(QStyle::PE_Widget, &opt, &painter, this);
}

void VoiceCallWidget::buttonIsAbled(bool callAble, bool hangUpAble, bool videoAble)
{
	ui->buttonVoiceCall->setHidden(!callAble);
	ui->buttonVideoCall->setHidden(!callAble);
	ui->buttonHangUp->setHidden(!hangUpAble);
	ui->buttonOnorOffVideo->setHidden(!hangUpAble);
	m_enableVideo = videoAble;
	if (videoAble)
		ui->buttonOnorOffVideo->setText(tr2("开启视频"));
	else
		ui->buttonOnorOffVideo->setText(tr2("关闭视频"));
}

void VoiceCallWidget::initAudioInput()
{
	QAudioFormat format;
	format.setSampleRate(8000);
	format.setChannelCount(1);
	format.setSampleSize(16);
	format.setCodec("audio/pcm");
	format.setByteOrder(QAudioFormat::LittleEndian);
	format.setSampleType(QAudioFormat::SignedInt);

	QAudioDeviceInfo info = QAudioDeviceInfo::defaultInputDevice();
	if (!info.isFormatSupported(format)) {
		qWarning() << "Default format not supported, trying to use the nearest.";
		format = info.nearestFormat(format);
	}

	m_audioInput = new QAudioInput(format);
	//    connect(m_audioInput, &QAudioInput::stateChanged, this, &VoiceCallWidget::handleStateChanged);

	m_audioOutput = new QAudioOutput(format);
	m_audioOutput->setBufferSize(10000000);
	m_outputDevice = m_audioOutput->start();
}

void VoiceCallWidget::slot_callRequest()
{
	while (m_callSocket->hasPendingDatagrams()) {
		video vp;
		memset(&vp, 0, sizeof(vp));
		QHostAddress peerAddr;
		quint16 peerPort;
		m_callSocket->readDatagram((char*)&vp, sizeof(video), &peerAddr, &peerPort);

		m_callSocket->disconnectFromHost();
		m_callSocket->bind(QHostAddress(m_curAddress), m_curPort, QUdpSocket::ShareAddress | QUdpSocket::ReuseAddressHint);
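		// The call socket carries a simple text protocol: "#VIDEOCALLREQUEST",
		// "#VOICECALLREQUEST", "#HANGUP", "#REFUSE", "#ONVIDEO" and "#OFFVIDEO" are
		// control messages; any other datagram is treated as PCM audio and written
		// to the audio output below.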

		char tmp1[] = "#VIDEOCALLREQUEST";
		if (strcmp(vp.data, tmp1) == 0) {
			m_targetIP = peerAddr;
			m_targetPort = peerPort;

			QMessageBox::StandardButton button = LyMessageBox::question(nullptr, tr2("通话"), tr2("您有一个视频通话,是否接听?"));
			if (button == QMessageBox::Yes) {
				m_inputDevice = m_audioInput->start();
				//                m_inputDevice->open(QIODevice::WriteOnly);
				connect(m_inputDevice, &QIODevice::readyRead, this, &VoiceCallWidget::slot_sendAudioData);

				OnorOffVideo(true, false);
				m_pktRecieveThread->setRecieveFlag(true);
				m_pktRecieveThread->start();

				ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("通话中..."));
				buttonIsAbled(false, true, false);
				flag = true;
			}
			else if (button == QMessageBox::No) {
				ui->labelVideo->setHidden(true);
				ui->plainTextEdit->setHidden(false);

				video vp;
				memset(&vp, 0, sizeof(vp));
				strcpy(vp.data, "#REFUSE");
				vp.lens = strlen(vp.data);
				m_callSocket->writeDatagram((const char*)&vp, sizeof(vp), m_targetIP, m_targetPort);
			}
			return;
		}

		char tmp2[] = "#VOICECALLREQUEST";
		if (strcmp(vp.data, tmp2) == 0) {
			m_targetIP = peerAddr;
			m_targetPort = peerPort;

			QMessageBox::StandardButton button = LyMessageBox::question(nullptr, tr2("通话"), tr2("您有一个语音通话,是否接听?"));
			if (button == QMessageBox::Yes) {
				m_inputDevice = m_audioInput->start();
				//                m_inputDevice->open(QIODevice::WriteOnly);
				connect(m_inputDevice, &QIODevice::readyRead, this, &VoiceCallWidget::slot_sendAudioData);

				m_pktRecieveThread->setRecieveFlag(true);
				m_pktRecieveThread->start();

				ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("通话中..."));
				buttonIsAbled(false, true, true);
				flag = true;
			}
			else if (button == QMessageBox::No) {
				video vp;
				memset(&vp, 0, sizeof(vp));
				strcpy(vp.data, "#REFUSE");
				vp.lens = strlen(vp.data);
				m_callSocket->writeDatagram((const char*)&vp, sizeof(vp), m_targetIP, m_targetPort);
			}
			return;
		}

		char tmp3[] = "#HANGUP";
		if (strcmp(vp.data, tmp3) == 0) {
			m_audioInput->stop();

			OnorOffVideo(false, false);
			ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("对方已挂断电话..."));
			flag = false;
			buttonIsAbled(true, false, false);
			return;
		}

		char tmp4[] = "#REFUSE";
		if (strcmp(vp.data, tmp4) == 0) {
			m_audioInput->stop();

			OnorOffVideo(false, false);
			ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("对方拒绝接通电话..."));
			flag = false;
			buttonIsAbled(true, false, false);
			return;
		}

		char tmp5[] = "#OFFVIDEO";
		if (strcmp(vp.data, tmp5) == 0) {
			ui->labelVideo->setHidden(true);
			ui->plainTextEdit->setHidden(false);
			return;
		}

		char tmp6[] = "#ONVIDEO";
		if (strcmp(vp.data, tmp6) == 0) {
			ui->labelVideo->setHidden(false);
			ui->plainTextEdit->setHidden(true);
			return;
		}

		if(m_outputDevice)
			m_outputDevice->write(vp.data, vp.lens);
		//        ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit(vp.data));
	}
}

void VoiceCallWidget::slot_sendAudioData()
{
	video vp;
	memset(&vp, 0, sizeof(vp));
	vp.lens = m_inputDevice->read(vp.data, 1024);
	int size = m_callSocket->writeDatagram((const char*)&vp, sizeof(vp), m_targetIP, m_targetPort);

	if (size <= 0) {
		QAbstractSocket::SocketError error = m_callSocket->error();
		ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("拨打电话失败,错误:"));
		if (error == QAbstractSocket::NetworkError)
			ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("网络连接异常!"));
	}
}

void VoiceCallWidget::handleStateChanged(QAudio::State newState)
{
	switch (newState) {
	case QAudio::StoppedState:
		if (m_audioInput->error() != QAudio::NoError) {
			// Error handling
		}
		else {
			// Finished recording
		}
		break;

	case QAudio::ActiveState:
		// Started recording - read from IO device

		break;

	default:
		// ... other cases as appropriate
		break;
	}
}

void VoiceCallWidget::on_pbtnJoinMulticast_clicked()
{
	QString IP = ui->cbxMultiIP->currentText();
	groupAddress = QHostAddress(IP); // a socket bound to QHostAddress::AnyIPv4 listens for IPv4 traffic only
	QString groupPort = ui->lePort->text(); // groupPort: the single port shared by the multicast group
	quint16 portValue = groupPort.toUShort();

	// QUdpSocket::ShareAddress allows other services to bind to the same address and port.
	// QUdpSocket::ReuseAddressHint tells QAbstractSocket to try to rebind even if the address and port are already bound by another socket; on Windows and Unix this maps to SO_REUSEADDR.
	// Combining ShareAddress | ReuseAddressHint lets several programs on the same machine bind the same port, which is handy for local testing on a single PC without a LAN.
	if (udpSocket->bind(QHostAddress::AnyIPv4, portValue, QUdpSocket::ShareAddress | QUdpSocket::ReuseAddressHint)) // bind the port first
	{
		// join the multicast group
		QString deviceName = "0";
		QList<QNetworkInterface> netList = QNetworkInterface::allInterfaces();
		foreach(QNetworkInterface net, netList)
		{
			int netId = net.index();
			std::string nameStr = net.name().toStdString();
			if (nameStr.find("ethernet") != nameStr.npos)
			{
				deviceName = net.name();
				break;
			}
		}

		const auto& NetInterface = QNetworkInterface::interfaceFromName(deviceName);
		if (NetInterface.addressEntries().size() <= 0)
		{
			LyMessageBox::information(nullptr, tr2("提示"), tr2("UDP组播连接错误!"));
			return;
		}

		udpSocket->setMulticastInterface(NetInterface);
		udpSocket->joinMulticastGroup(groupAddress, NetInterface); // join the multicast group at groupAddress and communicate over the bound groupPort
		ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("加入组播成功"));
		ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("组播地址IP:") + IP);
		ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("绑定端口:") + QString::number(portValue));
		ui->pbtnJoinMulticast->setEnabled(false);
		ui->pbtnExitMulticast->setEnabled(true);
		ui->cbxMultiIP->setEnabled(false);
		// readyRead was already connected in the constructor; UniqueConnection prevents the slot firing twice
		connect(udpSocket, &QUdpSocket::readyRead, this, &VoiceCallWidget::onSocketReadyRead, Qt::UniqueConnection);

		// voice-call setup
		auto ipList = QNetworkInterface::allAddresses();
		QHostAddress curIP;
		for (auto& address : ipList) {
			auto nProtocol = address.protocol();
			if (nProtocol == QAbstractSocket::IPv4Protocol) {
				curIP = address;
				break;
			}
		}
		m_curAddress = curIP.toString();
		ui->leMyID->setText(m_curAddress);
		//test
		//ui->cbFriID->addItem(m_curAddress);

		m_curPort = QRandomGenerator::global()->bounded(2001, 9999);
		//        ui->plainTextEdit->appendPlainText(QString::number(m_curPort));

		m_curVideoPort = QRandomGenerator::global()->bounded(10001, 20001);

		// announce this peer to the group: "A<ip> <callPort> <videoPort>" (groupPort/portValue from above are reused)
		QString msg = "A" + m_curAddress + " " + QString::number(m_curPort) + " " + QString::number(m_curVideoPort);
		QByteArray datagram = msg.toUtf8();
		udpSocket->writeDatagram(datagram, groupAddress, portValue);

		m_callSocket->bind(curIP, m_curPort, QUdpSocket::ShareAddress | QUdpSocket::ReuseAddressHint);
		// readyRead was already connected in the constructor; UniqueConnection prevents the slot firing twice
		connect(m_callSocket, &QUdpSocket::readyRead, this, &VoiceCallWidget::slot_callRequest, Qt::UniqueConnection);

		m_pktRecieveThread = new PktRecieveThread(this);
		m_pktRecieveThread->setIPAndPort(m_curAddress, m_curVideoPort);
		connect(m_pktRecieveThread, &PktRecieveThread::sig_pictureByte, this, [&](const QImage &image) {
			const QPixmap &videoPixmap = QPixmap::fromImage(image);
			//videoPixmap.scaled(ui->labelVideo->size(), Qt::KeepAspectRatio);
			ui->labelVideo->setScaledContents(true);
			ui->labelVideo->setPixmap(videoPixmap);
		});

		buttonIsAbled(true, false, false);
	}
	else
		ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("**绑定端口失败"));
}

void VoiceCallWidget::on_pbtnExitMulticast_clicked()
{
	if (flag) {
		LyMessageBox::warning(nullptr, tr2("警告"), tr2("您还在通话中,请先挂断电话再进行此操作"));
		return;
	}

	// ask peers to remove this ID from their contact lists
	QString groupPort = ui->lePort->text();
	quint16 portValue = groupPort.toUShort();
	QByteArray datagram = QString("D" + ui->leMyID->text()).toUtf8();
	udpSocket->writeDatagram(datagram, groupAddress, portValue);

	udpSocket->leaveMulticastGroup(groupAddress); // leave the multicast group
	udpSocket->abort(); // aborts the current connection and resets the socket; unlike disconnectFromHost(), this closes the socket immediately and discards any pending data in the write buffer
	ui->pbtnJoinMulticast->setEnabled(true);
	ui->pbtnExitMulticast->setEnabled(false);
	ui->cbxMultiIP->setEnabled(true);
	ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("**已退出组播,解除端口绑定"));

	m_callSocket->abort();
	ui->cbFriID->clear();
	buttonIsAbled(false, false, false);
}

void VoiceCallWidget::on_pbtnSend_clicked()
{
	QString groupPort = ui->lePort->text(); // groupPort: the single port shared by the multicast group
	quint16 portValue = groupPort.toUShort();
	QString msg = ui->leSendData->text();
	QByteArray datagram = msg.toUtf8();

	udpSocket->writeDatagram(datagram, groupAddress, portValue);
	ui->plainTextEdit->appendPlainText("[multicst] " + msg);
	ui->leSendData->clear();
	ui->leSendData->setFocus();
}

void VoiceCallWidget::onSocketReadyRead()
{
	while (udpSocket->hasPendingDatagrams())
	{
		QByteArray datagram;
		datagram.resize(udpSocket->pendingDatagramSize());
		QHostAddress peerAddr;
		quint16 peerPort;
		udpSocket->readDatagram(datagram.data(), datagram.size(), &peerAddr, &peerPort);
		QString str = datagram.data();
		QString address{ "" };
		if (str.length() > 0) {
			address = str.mid(1);
		}
		//        QRegExp reg("[0-9]+");
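		// Discovery protocol over the multicast group:
		//   "A<ip> <callPort> <videoPort>" - a peer announces itself (we reply with an "E" message)
		//   "D<ip>"                        - a peer leaves the group
		//   "E<ip> <callPort> <videoPort>" - a peer answers an announcement
		// Anything else is treated as a plain chat message and shown in the log.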
		if (address != "" && (str.at(0) == 'A' || str.at(0) == 'D' || str.at(0) == 'E')) {
			int len = address.length();
			QString add{ "" };
			int index = 0;
			for (int i = 0; i < len; i++) {
				if (address.at(i) == ' ') {
					index = i + 1;
					break;
				}
				add += address.at(i);
			}

			QString strPort{ "" };
			quint16 port, videoPort;
			if (index != 0) {
				for (int i = index; i < len; i++) {
					if (address.at(i) == ' ') {
						index = i + 1;
						break;
					}
					strPort += address.at(i);
				}
			}
			port = strPort.toUShort();
			videoPort = address.mid(index).toUShort();

			if (add == m_curAddress)
				continue;

			if (str.at(0) == 'A') {
				m_friIPPort[add] = port;
				m_friIPPortVideo[add] = videoPort;
				if(ui->cbFriID->findText(add) == -1)
					ui->cbFriID->addItem(add);

				QString groupPort = ui->lePort->text();
				quint16 portValue = groupPort.toUShort();
				QString msg = "E" + m_curAddress + " " + QString::number(m_curPort) + " " + QString::number(m_curVideoPort);
				QByteArray datagram = msg.toUtf8();
				udpSocket->writeDatagram(datagram, groupAddress, portValue);
			}
			else if (str.at(0) == 'D') {
				int index = 0;
				for (int i = 0; i < ui->cbFriID->count(); i++) {
					auto tmp = ui->cbFriID->itemText(i);
					if (tmp == address) {
						index = i;
						break;
					}
				}
				ui->cbFriID->removeItem(index);
				m_friIPPort.remove(address);
				m_friIPPortVideo.remove(address);
			}
			else if (str.at(0) == 'E') {
				bool exit = false;
				for (int i = 0; i < ui->cbFriID->count(); i++) {
					auto tmp = ui->cbFriID->itemText(i);
					if (tmp == add) {
						exit = true;
						break;
					}
				}
				if (!exit) {
					ui->cbFriID->addItem(add);
					m_friIPPort[add] = port;
					m_friIPPortVideo[add] = videoPort;
				}
			}
		}
		else {
			QString peer = "[From " + peerAddr.toString() + ":" + QString::number(peerPort) + "] ";
			ui->plainTextEdit->appendPlainText(peer + str);
		}
	}
}

void VoiceCallWidget::on_buttonVoiceCall_clicked()
{
	auto targetIP = ui->cbFriID->currentText();

	m_targetIP = QHostAddress(targetIP);
	m_targetPort = m_friIPPort.value(targetIP);

	video vp;
	memset(&vp, 0, sizeof(vp));
	strcpy(vp.data, "#VOICECALLREQUEST");
	vp.lens = strlen(vp.data);
	int size = m_callSocket->writeDatagram((const char*)&vp, sizeof(vp), m_targetIP, m_targetPort);

	if (size <= 0) {
		QAbstractSocket::SocketError error = m_callSocket->error();
		qDebug() << QString::fromLocal8Bit("拨打电话失败,错误:") << error;
		if (error == QAbstractSocket::NetworkError)
			qDebug() << QString::fromLocal8Bit("网络连接异常!");
	}
	else {
		m_inputDevice = m_audioInput->start();
		connect(m_inputDevice, &QIODevice::readyRead, this, &VoiceCallWidget::slot_sendAudioData);
		flag = true;
		ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("通话中..."));
		buttonIsAbled(false, true, true);

		m_pktRecieveThread->setRecieveFlag(true);
		m_pktRecieveThread->start();
	}
}

void VoiceCallWidget::on_buttonVideoCall_clicked()
{
	auto targetIP = ui->cbFriID->currentText();

	m_targetIP = QHostAddress(targetIP);
	m_targetPort = m_friIPPort.value(targetIP);

	video vp;
	memset(&vp, 0, sizeof(vp));
	strcpy(vp.data, "#VIDEOCALLREQUEST");
	vp.lens = strlen(vp.data);
	int size = m_callSocket->writeDatagram((const char*)&vp, sizeof(vp), m_targetIP, m_targetPort);

	if (size <= 0) {
		QAbstractSocket::SocketError error = m_callSocket->error();
		qDebug() << QString::fromLocal8Bit("拨打电话失败,错误:") << error;
		if (error == QAbstractSocket::NetworkError)
			qDebug() << QString::fromLocal8Bit("网络连接异常!");
	}
	else {
		m_inputDevice = m_audioInput->start();
		connect(m_inputDevice, &QIODevice::readyRead, this, &VoiceCallWidget::slot_sendAudioData);
		flag = true;
		ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("通话中..."));
		buttonIsAbled(false, true, false);

		OnorOffVideo(true, false);
		m_pktRecieveThread->setRecieveFlag(true);
		m_pktRecieveThread->start();
	}
}

void VoiceCallWidget::on_buttonHangUp_clicked()
{
	m_audioInput->stop();
	m_callSocket->disconnectFromHost();
	m_callSocket->bind(QHostAddress(m_curAddress), m_curPort, QUdpSocket::ShareAddress | QUdpSocket::ReuseAddressHint);

	flag = false;

	video vp;
	memset(&vp, 0, sizeof(vp));
	strcpy(vp.data, "#HANGUP");
	vp.lens = strlen(vp.data);
	int size = m_callSocket->writeDatagram((const char*)&vp, sizeof(vp), m_targetIP, m_targetPort);
	if (size <= 0) {
		QAbstractSocket::SocketError error = m_callSocket->error();
		ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("拨打电话失败,错误:"));
		if (error == QAbstractSocket::NetworkError)
			ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("网络连接异常!"));
	}

	ui->plainTextEdit->appendPlainText(QString::fromLocal8Bit("已挂断电话。"));
	
	OnorOffVideo(false, false);

	buttonIsAbled(true, false, false);
}

void VoiceCallWidget::OnorOffVideo(bool enableVideo, bool isButtonClicked)
{
	if (enableVideo) {
		if (!isButtonClicked) {
			ui->labelVideo->setHidden(false);
			ui->labelVideo->setText(QString::fromLocal8Bit("正在建立视频链接..."));
			ui->plainTextEdit->setHidden(true);
		}

		if (!m_pktSendThread)
			m_pktSendThread = new PktSendThread;
		//test
		//m_pktSendThread->setIPAndPort(m_curAddress, m_curVideoPort);
		m_pktSendThread->setIPAndPort(m_targetIP.toString(), m_friIPPortVideo.value(m_targetIP.toString()));
		m_pktSendThread->setRecieveFlag(true);
		m_pktSendThread->start();

		//m_pktRecieveThread->setRecieveFlag(true);
		//m_pktRecieveThread->start();
	}
	else {
		if (!isButtonClicked) {
			ui->labelVideo->setHidden(true);
			ui->plainTextEdit->setHidden(false);
		}
		
		if (m_pktSendThread) {
			m_pktSendThread->setRecieveFlag(false);
			m_pktSendThread->quit();
		}

		//m_pktRecieveThread->quit();
		//m_pktRecieveThread->setRecieveFlag(false);
	}
}

void VoiceCallWidget::on_buttonOnorOffVideo_clicked()
{
	OnorOffVideo(m_enableVideo, true);
	video vp;
	memset(&vp, 0, sizeof(vp));
	if (!m_enableVideo) {
		strcpy(vp.data, "#OFFVIDEO");
		vp.lens = strlen(vp.data);
		int size = m_callSocket->writeDatagram((const char*)&vp, sizeof(vp), m_targetIP, m_targetPort);
	}
	else {
		strcpy(vp.data, "#ONVIDEO");
		vp.lens = strlen(vp.data);
		int size = m_callSocket->writeDatagram((const char*)&vp, sizeof(vp), m_targetIP, m_targetPort);
	}

	m_enableVideo = !m_enableVideo;

	if (m_enableVideo)
		ui->buttonOnorOffVideo->setText(tr2("开启视频"));
	else
		ui->buttonOnorOffVideo->setText(tr2("关闭视频"));
}
