Qt + OpenCV: capturing the camera, applying a fast bilateral filter for video-stream beautification (skin smoothing), and displaying the result

Environment: OpenCV 2.4.13, VS2015, Qt 5.7.1

Notes:

1. OpenCV's built-in skin-smoothing/whitening routines are only useful as a reference; their performance is too low, which is why a fast (recursive) bilateral filter is used instead.

2. In this example OpenCV is only used to open the camera; it could be replaced with the system's own capture API.

3. jpeg.lib (libjpeg) must be built first.

Processing steps:

1. Grab a frame from the camera;

2. Convert the IplImage to JPEG with libjpeg;

3. Run the fast bilateral filter on the image;

4. Convert the resulting unsigned char * buffer to a QImage and display it (a sketch of this conversion follows the performance numbers below);

Performance: with thread count = 4 on an i5, a 640×480 frame takes about 30 ms, so the pipeline can reach roughly 30 fps.
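
Step 4 is the easiest place to get wrong, so here is a minimal sketch of the conversion, assuming the filtered output is a tightly packed RGBA buffer (row stride = width * 4). The helper name rgbaToPixmap is illustrative only and is not part of the demo code:

#include <QImage>
#include <QPixmap>

// Wrap a raw RGBA buffer in a QImage and hand the UI a deep copy,
// so the buffer can be freed right after this call returns.
QPixmap rgbaToPixmap(const unsigned char* pixels, int width, int height)
{
	const int bytesPerLine = width * 4;      // assumed stride: no row padding
	QImage view(pixels, width, height, bytesPerLine, QImage::Format_ARGB32);
	return QPixmap::fromImage(view.copy());  // deep copy, independent of 'pixels'
}

One caveat: QImage::Format_ARGB32 expects BGRA byte order on little-endian machines, so an RGBA buffer (the layout stb_image produces) will appear with red and blue swapped; in Qt 5, Format_RGBA8888 matches an RGBA buffer directly.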

Project structure:

1. main.cpp

#include <QtWidgets/QApplication>
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
using namespace std;
#include "cameratest.h"
int main(int argc, char *argv[])
{
	QApplication a(argc, argv);
	CameraTest w;
	w.show();
	return a.exec();
}

2. cameratest.h / cameratest.cpp / cameratest.ui

#ifndef CAMERATEST_H
#define CAMERATEST_H

#include <QtWidgets/QMainWindow>
#include "ui_cameratest.h"
#include "opencv2/core/core.hpp"    
#include "opencv2/highgui/highgui.hpp"    
#include "opencv2/imgproc/imgproc.hpp"    
#include <stdio.h>  
#include <opencv2/opencv.hpp>   

// Filter parameters used by cameratest.cpp; the slider slots update them at run time.
static float sigma_spatial = 0.05f;
static float sigma_range = 0.05f;

// stb_image single-header switches; they must be defined before stb_image.h /
// stb_image_write.h are included (cameratest.cpp includes this header first).
#define STB_IMAGE_IMPLEMENTATION
#define STB_IMAGE_WRITE_IMPLEMENTATION
#define _CRT_SECURE_NO_WARNINGS

#include <QImage>  
#include <QTimer>     // timer that paces how often a frame is grabbed
#include <QSlider>
using namespace cv;
class CameraTest : public QMainWindow
{
	Q_OBJECT

public:
	CameraTest(QWidget *parent = 0);
	~CameraTest();

private:
	Ui::CameraTestClass ui;
private slots:
	void openCamara();      // open the camera
	void readFarme();       // read the current frame
	void closeCamara();     // close the camera
	void horizontalSliderChanged(int);
	void horizontalSliderChanged2(int);
private:
	QTimer    *timer;
	QImage    *imag;

	CvCapture *cam;   // video capture handle, passed to the OpenCV capture functions
	IplImage  *frame; // pointer to the IplImage holding each captured frame
};

#endif // CAMERATEST_H
#include "cameratest.h"
#include <time.h>
#include <iostream>
#include "stb_image.h"
#include "RBFilter_AVX2.h"
#include "jpeg-9\jpeglib.h"
#include "crbfilter.h"
#include "stb_image_write.h"
using namespace std;
// Simple elapsed-time helper based on clock().
class TestRunTimer
{
	clock_t begTime;

public:
	void start() { begTime = clock(); }
	float elapsedTimeMS() { return float(clock() - begTime) * 1000.0f / CLOCKS_PER_SEC; }
};

// Decode a JPEG held in memory, run the fast recursive bilateral filter and return
// the filtered RGBA buffer. The result comes from _aligned_malloc, so the caller
// must release it with _aligned_free. Returns NULL on failure.
unsigned char * testRunRecursiveBF_AVX2_mt(unsigned char * image, int len, int thread_count, bool multiThread = true)
{
	int width, height, channel;
	// force 4 channels (RGBA) so the filter always works on 32-bit pixels
	unsigned char * img = stbi_load_from_memory((stbi_uc*)image, len, &width, &height, &channel, 4);
	if (!img)
	{
		return NULL;
	}
	channel = 4;

	unsigned char * img_out;
	if (multiThread)
	{
		CRBFilterAVX2 rbf_object;
		bool success = rbf_object.initialize(width, height, thread_count, false);
		if (!success)
		{
			stbi_image_free(img);
			return nullptr;
		}
		rbf_object.setSigma(sigma_spatial, sigma_range);

		// the AVX2 path needs rows padded to the optimal pitch and 32-byte aligned buffers
		int pitch = rbf_object.getOptimalPitch(width);
		img_out = (unsigned char*)_aligned_malloc(pitch * height, 32);

		unsigned char* buffer = (unsigned char*)_aligned_malloc(pitch * height, 32);
		for (int y = 0; y < height; y++)
		{
			memcpy(buffer + y * pitch, img + y * width * 4, width * 4);
		}
		stbi_image_free(img);   // stb memory must be released with stbi_image_free, not delete[]
		img = buffer;

		success = rbf_object.filter(img_out, img, width, height, pitch);

		if (!success)
		{
			_aligned_free(img);
			_aligned_free(img_out);
			return nullptr;
		}
	}
	else
	{
		CRBFilter rbf_object;

		int pitch = rbf_object.getOptimalPitch(width);

		img_out = (unsigned char*)_aligned_malloc(pitch * height, 32);
		unsigned char* buffer = (unsigned char*)_aligned_malloc(pitch * height, 32);
		for (int y = 0; y < height; y++)
		{
			memcpy(buffer + y * pitch, img + y * width * 4, width * 4);
		}
		stbi_image_free(img);
		img = buffer;

		rbf_object.CRB_Filter(img, img_out, width, height, pitch, sigma_spatial, sigma_range);
	}

	_aligned_free(img);
	return img_out;
}
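
As a sanity check, the function can be exercised offline by feeding it an already encoded JPEG. The sketch below is hypothetical (the file name and thread count are placeholders); the only firm contract is that a non-NULL result comes from _aligned_malloc and must be released with _aligned_free:

#include <cstdio>
#include <cstdlib>
#include <vector>

// Hypothetical smoke test: load a JPEG file into memory, filter it, free the result.
static void filterJpegFile()
{
	FILE* f = nullptr;
	fopen_s(&f, "test.jpg", "rb");            // placeholder input file
	if (!f) return;
	fseek(f, 0, SEEK_END);
	long size = ftell(f);
	fseek(f, 0, SEEK_SET);
	std::vector<unsigned char> jpg(size);
	fread(jpg.data(), 1, size, f);
	fclose(f);

	unsigned char* out = testRunRecursiveBF_AVX2_mt(jpg.data(), (int)size, 4, true);
	if (out)
		_aligned_free(out);                   // allocated with _aligned_malloc above
}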

// Write an in-memory JPEG buffer to a file.
static bool jpegSave(const char* filename, unsigned char *outbuffer, unsigned long outlen)
{
	FILE *f;
	fopen_s(&f, filename, "wb");
	if (!f)
		return false;

	size_t size = fwrite(outbuffer, (size_t)outlen, 1, f);
	fclose(f);


	if (size == 1)
		return true;

	return false;
}
/*********************************
****** IplImage -> JPEG (in memory) ******
**********************************/
// The output buffer is allocated by jpeg_mem_dest; the caller must free() it.
static bool ipl2jpeg(IplImage *img, unsigned char **outbuffer, unsigned long *outlen)
{
	unsigned char *outdata = (uchar *)img->imageData;
	struct jpeg_compress_struct cinfo = { 0 };
	struct jpeg_error_mgr jerr;
	JSAMPROW row_ptr[1];
	int row_stride;

	*outbuffer = NULL;
	*outlen = 0;

	cinfo.err = jpeg_std_error(&jerr);
	jpeg_create_compress(&cinfo);


	cinfo.image_width = img->width;
	cinfo.image_height = img->height;
	cinfo.input_components = img->nChannels;
	// note: OpenCV delivers frames as BGR; JCS_RGB keeps the demo simple but swaps the R and B channels
	cinfo.in_color_space = JCS_RGB;

	jpeg_mem_dest(&cinfo, outbuffer, outlen);

	jpeg_set_defaults(&cinfo);
	jpeg_start_compress(&cinfo, TRUE);
	row_stride = img->width * img->nChannels;


	while (cinfo.next_scanline < cinfo.image_height)
	{
		row_ptr[0] = &outdata[cinfo.next_scanline * row_stride];
		jpeg_write_scanlines(&cinfo, row_ptr, 1);
	}

	jpeg_finish_compress(&cinfo);
	jpeg_destroy_compress(&cinfo);

	return true;
}
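
Because jpeg_mem_dest allocates *outbuffer with malloc, the caller owns the compressed data and must free() it. A minimal usage sketch combining ipl2jpeg and jpegSave (the output file name is just an example):

// Sketch: grab one frame, compress it in memory and dump it to disk.
static void saveOneFrameAsJpeg(CvCapture* cam)
{
	IplImage* frame = cvQueryFrame(cam);         // owned by OpenCV, do not release it
	unsigned char* jpgBuf = NULL;
	unsigned long  jpgLen = 0;
	if (frame && ipl2jpeg(frame, &jpgBuf, &jpgLen))
	{
		jpegSave("frame.jpg", jpgBuf, jpgLen);   // example file name
		free(jpgBuf);                            // allocated by jpeg_mem_dest
	}
}
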
CameraTest::CameraTest(QWidget *parent)
	: QMainWindow(parent)
{
	ui.setupUi(this);
	cam = NULL;
	timer = new QTimer(this);
	imag = new QImage();         // initialize the display image

	/* signals and slots */
	connect(timer, SIGNAL(timeout()), this, SLOT(readFarme()));  // on each timeout, grab and process one frame
	connect(ui.open, SIGNAL(clicked()), this, SLOT(openCamara()));
	connect(ui.closeCam, SIGNAL(clicked()), this, SLOT(closeCamara()));
	connect(ui.horizontalSlider, SIGNAL(valueChanged(int)), this, SLOT(horizontalSliderChanged(int)));
	connect(ui.horizontalSlider_2, SIGNAL(valueChanged(int)), this, SLOT(horizontalSliderChanged2(int)));
	// both sliders run 0..10 and map to sigma = value * 0.01 (i.e. 0.00..0.10)
	ui.horizontalSlider->setValue(5);
	ui.horizontalSlider->setMaximum(10);
	ui.horizontalSlider_2->setValue(5);
	ui.horizontalSlider_2->setMaximum(10);
}
void CameraTest::horizontalSliderChanged(int value)
{
	// spatial sigma: slider 0..10 -> 0.00..0.10
	sigma_spatial = 0.01f * value;
}
void CameraTest::horizontalSliderChanged2(int value)
{
	// range sigma: slider 0..10 -> 0.00..0.10
	sigma_range = 0.01f * value;
}
CameraTest::~CameraTest()
{

}
void CameraTest::openCamara()
{
	cam = cvCreateCameraCapture(0); // open the default camera and start capturing
	timer->start(30);               // fire timeout() every 30 ms (1000/30, about 33 fps at most)
}
//
//TestRunTimer timer;
//timer.start();
//cout << ", QImage image---time ms: " << timer.elapsedTimeMS();
/*********************************
****** read one frame from the camera ******
**********************************/
void CameraTest::readFarme()
{
	unsigned char * buffer;
	unsigned long  len;
	frame = cvQueryFrame(cam);       // grab one frame (BGR IplImage owned by OpenCV)

	ipl2jpeg(frame, &buffer, &len);  // compress it to an in-memory JPEG

	// decode the JPEG and run the fast bilateral filter; returns an RGBA buffer from _aligned_malloc
	unsigned char * outImg = testRunRecursiveBF_AVX2_mt(buffer, len, 1, true);

	// assumes the filter pitch equals width * 4; otherwise pass the real pitch as bytesPerLine
	QImage image((const uchar*)outImg, frame->width, frame->height, QImage::Format_ARGB32);
	ui.label->setPixmap(QPixmap::fromImage(image));  // fromImage() makes its own copy
	_aligned_free(outImg);
	free(buffer);                    // buffer was malloc'ed by jpeg_mem_dest
}

/*******************************
*** close the camera and release resources (must be released) ***
********************************/
void CameraTest::closeCamara()
{
	timer->stop();          // stop grabbing frames

	cvReleaseCapture(&cam); // release the capture device
}

3. crbfilter.h / crbfilter.cpp / RBFilter_AVX2.h / RBFilter_AVX2.cpp — reference for the fast bilateral filter algorithm: https://www.cnblogs.com/cpuimage/p/7629304.html
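
For readers who want the idea behind the referenced algorithm without reading the AVX2 code: a recursive bilateral filter replaces the windowed sum of a classic bilateral filter with left-to-right / right-to-left (and then vertical) recursions, where the feedback coefficient is damped by a range weight computed from the difference between neighbouring pixels, so the smoothing stops at edges. The single-channel causal pass below is only an illustrative sketch of that recursion, assuming pixel values in [0,1] and non-zero sigmas; it is not the actual CRBFilter / CRBFilterAVX2 implementation:

#include <cmath>

// One left-to-right pass of a recursive bilateral filter over a grayscale row.
// A complete filter also runs the right-to-left pass, the two vertical passes,
// and normalizes by the filtered weights.
void recursiveBilateralRow(const float* in, float* out, int width,
                           float sigma_spatial, float sigma_range)
{
	// spatial decay per pixel step; sigma_spatial is relative to the image width,
	// matching the 0.00..0.10 range used by the sliders in this demo
	const float alpha = std::exp(-std::sqrt(2.0f) / (sigma_spatial * width));
	out[0] = in[0];
	for (int x = 1; x < width; ++x)
	{
		float diff = in[x] - in[x - 1];
		// range weight: close to 1 on flat regions, close to 0 across strong edges
		float w = std::exp(-(diff * diff) / (2.0f * sigma_range * sigma_range));
		float a = alpha * w;                  // edge-aware feedback coefficient
		out[x] = (1.0f - a) * in[x] + a * out[x - 1];
	}
}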

jpeg-9 source code:

https://download.csdn.net/download/u013495598/11078766

Full demo project (Qt + OpenCV camera capture, fast bilateral-filter beautification/skin smoothing and display):

https://download.csdn.net/download/u013495598/11078798

Alternatively, email 362168322@qq.com to request the demo.
