5.2.3图片的ROI区域混合:
#include<opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
using namespace cv;
using namespace std;
#include<iostream>
bool ROI_LinearBlending();
int main()
{
	// Set console color (Windows cmd).
	system("color 5E");

	// Run the ROI linear-blending demo and report success.
	const bool ok = ROI_LinearBlending();
	if (ok)
	{
		cout << "ROI_LinearBlending" << "运行成功!" << endl;
	}

	waitKey(0);
	return 0;
}
bool ROI_LinearBlending()
{
Mat srcImage1, srcImage2;
srcImage1 = imread("diaochan.jpg");
srcImage2 = imread("wxr.jpg");
if (!srcImage1.data){
printf("读取错误!\n");
return false;
}
if (!srcImage2.data){
printf("读取错误!\n");
return false;
}
Mat imageROI;
imageROI = srcImage1(Rect(50, 50, srcImage2.cols, srcImage2.rows));
//imageROI = srcImage1(Range(50, 50+srcImage2.rows)
//,Range(200,200+srcImage2.cols,));
addWeighted(imageROI, 0.5, srcImage2, 0.3, 0., imageROI);
namedWindow("叠加窗口");
imshow("叠加窗口", srcImage1);
imwrite("0.1.jpg", srcImage1);
return true;
}
程序执行结果如图:
5.3,分离颜色通道、多通道图像混合:
#include<opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
using namespace cv;
using namespace std;
#include<iostream>
bool MultiChannelBlending();
void ShowHelpText();
int main()
{
	// Set console color (Windows cmd).
	system("color 9F");
	if (MultiChannelBlending())
	{
		// bug fix: message previously said "ROI_LinearBlending" —
		// copy-paste leftover from the earlier example.
		cout << "MultiChannelBlending" << "运行成功!" << endl;
	}
	waitKey(0);
	return 0;
}
bool MultiChannelBlending()
{
Mat srcImage1, srcImage2;
vector<Mat> channels;
Mat imageBlueChannel;
srcImage1 = imread("dch.jpg");
srcImage2 = imread("wxr.jpg",0);//传入单通道灰度图
if (!srcImage1.data){
printf("读取错误!\n");
return false;
}
if (!srcImage2.data){
printf("读取错误!\n");
return false;
}
split(srcImage1, channels);//分离原图三通道
imageBlueChannel = channels.at(0);
Mat imageROI = imageBlueChannel(Rect(50, 50, srcImage2.cols,
srcImage2.rows));//原图的蓝色通道找出ROI区域
addWeighted(imageROI, 1.0, srcImage2, 0.5, 0., imageROI);
///原图的蓝色通道找出ROI区域和logo区域混合,此时logo自动成蓝色单通道图
merge(channels, srcImage1);//合并原图通道
namedWindow("游戏原画+logo蓝色通道");
imshow("游戏原画+logo蓝色通道", srcImage1);
imwrite("1.0.jpg", srcImage1);
Mat imageGreenchannel;
srcImage1 = imread("dch.jpg");
srcImage2 = imread("wxr.jpg", 0);
if (!srcImage1.data){
printf("读取错误!\n");
return false;
}
if (!srcImage2.data){
printf("读取错误!\n");
return false;
}
split(srcImage1, channels);
imageGreenchannel = channels.at(1);
Mat imageROI1 = imageGreenchannel(Rect(50, 50, srcImage2.cols,
srcImage2.rows));
addWeighted(imageROI1, 1.0, srcImage2, 0.5, 0., imageROI1);
merge(channels, srcImage1);
namedWindow("游戏原画+logo蓝色通道");
imshow("游戏原画+logo蓝色通道", srcImage1);
imwrite("1.1.jpg", srcImage1);
Mat imageRedchannel;
srcImage1 = imread("dch.jpg");
srcImage2 = imread("wxr.jpg", 0);
if (!srcImage1.data){
printf("读取错误!\n");
return false;
}
if (!srcImage2.data){
printf("读取错误!\n");
return false;
}
split(srcImage1, channels);
imageRedchannel = channels.at(2);
Mat imageROI2 = imageRedchannel(Rect(50, 50, srcImage2.cols,
srcImage2.rows));
addWeighted(imageROI2, 1.0, srcImage2, 0.5, 0., imageROI2);
merge(channels, srcImage1);
namedWindow("游戏原画+logo蓝色通道");
imshow("游戏原画+logo蓝色通道", srcImage1);
imwrite("1.2.jpg", srcImage1);
return true;
}
程序执行结果如图;
5.4 图像对比度、亮度值调整:
#include<opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
using namespace cv;
using namespace std;
#include<iostream>
// Trackbar callback: recomputes g_dstImage from g_srcImage with the
// current contrast/brightness values (both parameters are unused).
static void on_ContrastAndBright(int ,void*);
static void ShowHelpText();  // declared but never defined or called in this program
int g_nContrastValue, g_nBrightValue;  // trackbar-bound contrast (0-300) and brightness (0-200)
Mat g_srcImage, g_dstImage;  // source image and adjusted output
int main()
{
	g_srcImage = imread("dch.jpg");
	if (!g_srcImage.data){
		printf("读取错误!\n");
		// bug fix: `return false` from main() yields exit code 0 (success);
		// report the failure with a non-zero code instead.
		return -1;
	}
	g_dstImage = Mat::zeros(g_srcImage.size(), g_srcImage.type());
	g_nContrastValue = 80;  // initial contrast (scaled by 0.01 in the callback)
	g_nBrightValue = 80;    // initial brightness offset
	namedWindow("效果图窗口", 1);
	createTrackbar("对比度", "效果图窗口", &g_nContrastValue, 300,
		on_ContrastAndBright);
	createTrackbar("亮度", "效果图窗口", &g_nBrightValue, 200,
		on_ContrastAndBright);
	// Render once with the initial values; the callback ignores its
	// arguments and reads the globals, so a single call suffices.
	on_ContrastAndBright(0, 0);
	// Keep processing GUI events until the user presses 'q'.
	while (char(waitKey(1)) != 'q'){}
	return 0;
}
// Trackbar callback: g_dstImage(i,j) = (contrast*0.01)*g_srcImage(i,j) + brightness,
// saturated to [0,255] per channel. Both parameters are unused; the
// current values are read from the globals bound to the trackbars.
static void on_ContrastAndBright(int, void*)
{
	namedWindow("原始图窗口", 1);
	// convertTo applies alpha*pixel + beta with saturate_cast<uchar> on
	// every channel — identical in result to the former triple per-pixel
	// loop, but vectorized by OpenCV. rtype=-1 keeps the source type.
	g_srcImage.convertTo(g_dstImage, -1, g_nContrastValue * 0.01, g_nBrightValue);
	imshow("原始图窗口", g_srcImage);
	imshow("效果图窗口", g_dstImage);
	imwrite("5.4.jpg", g_dstImage);
}
程序执行结果如图;
5.5离散傅里叶变换:
#include<opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include<opencv2/imgproc/imgproc.hpp>
using namespace cv;
using namespace std;
int main()
{
//1.以灰度模式读取原始图像
Mat srcImage = imread("dch.jpg",0);
if (!srcImage.data){
printf("读取错误!\n");
return false;
}
imshow("原始图像", srcImage);
//2.将输入图像延拓到最佳尺寸,边界用0填充
int m = getOptimalDFTSize(srcImage.rows);
int n = getOptimalDFTSize(srcImage.cols);
Mat padded;
copyMakeBorder(srcImage, padded, 0, m - srcImage.rows, 0, n -
srcImage.cols, BORDER_CONSTANT, Scalar::all(0));
//3.魏傅里叶变换的结果(实部和虚部)分配存储空间
Mat planes[] = {
Mat_<float>(padded), Mat::zeros(padded.size(), CV_32F)
};
Mat complexI;
merge(planes, 2, complexI);
//4.就行就地离散傅里叶变换
dft(complexI, complexI);
//5.将复数转换为幅值
split(complexI, planes);
//planes[0] = Re(DFT(I), planes[1] = Im(DFT(I))
magnitude(planes[0], planes[1],planes[0]);
Mat magnitudeImage = planes[0];
//6.进行对数尺度缩放
magnitudeImage += Scalar::all(1);
log(magnitudeImage, magnitudeImage);
//7.剪切和重分布幅度图像限,
magnitudeImage = magnitudeImage(Rect(0, 0, magnitudeImage.cols&-2,
magnitudeImage.rows&-2));
int cx = magnitudeImage.cols / 2;
int cy = magnitudeImage.rows / 2;
Mat q0(magnitudeImage, Rect(0, 0, cx, cy));
Mat q1(magnitudeImage, Rect(cx, 0, cx, cy));
Mat q2(magnitudeImage, Rect(0, cy, cx, cy));
Mat q3(magnitudeImage, Rect(cx,cy, cx, cy));
//交换象限
Mat tmp;
q0.copyTo(tmp);
q3.copyTo(q0);
tmp.copyTo(q3);
q1.copyTo(tmp);
q2.copyTo(q1);
tmp.copyTo(q2);
//8.归一化,用0-1之间的浮点值将矩阵变换为可视的图像格式
normalize(magnitudeImage, magnitudeImage, 0, 1, NORM_MINMAX);
//9.显示效果图
imshow("频谱赋值", magnitudeImage);
imwrite("5.5.jpg", magnitudeImage);
waitKey();
return 0;
}
程序执行结果如图:
原图为:
傅里叶图谱为:
5.6.3 XML和YAML文件的写入:
#include<opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include<opencv2/imgproc/imgproc.hpp>
using namespace cv;
using namespace std;
#include <time.h>
int main()
{
	FileStorage fs("test.yaml", FileStorage::WRITE);
	// Write scalar entries.
	// bug fix: the key was misspelled "frameCout", so the companion
	// reader program (which queries "frameCount") always got 0.
	fs << "frameCount" << 5;
	time_t rawtime; time(&rawtime);
	fs << "calibrationDate" << asctime(localtime(&rawtime));
	// Write matrices (camera intrinsics and distortion coefficients).
	Mat cameraMatrix = (Mat_<double>(3, 3) << 1000, 0, 320, 0, 1000, 240, 0, 0, 1);
	Mat distCoeffs = (Mat_<double>(5, 1) << 0.1, 0.01, -0.001, 0, 0);
	fs << "cameraMatrix" << cameraMatrix << "distCoeffs" << distCoeffs;
	// Write a sequence of mappings, each holding a point and its 8-bit
	// LBP value expanded into a flow-style list of bits.
	fs << "features" << "[";
	for (int i = 0; i < 3; i++)
	{
		int x = rand() % 640;
		int y = rand() % 480;
		uchar lbp = rand() % 256;
		fs << "{:" << "x" << x << "y" << y << "lbp" << "[:";
		for (int j = 0; j < 8; j++)
			fs << ((lbp >> j) & 1);
		fs << "]" << "}";
	}
	fs << "]";
	fs.release();
	printf("文件读写完毕");
	getchar();
	return 0;
}
程序执行结果如图:
5.6.4 XML和YAML文件的读取:
#include<opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include<iostream>
using namespace cv;
using namespace std;
#include <time.h>
int main()
{
	// Change the console font color (Windows cmd).
	system("color 6F");
	FileStorage fs2("test.yaml", FileStorage::READ);
	// Robustness: without this check, a missing file silently yields
	// empty/default values for every query below.
	if (!fs2.isOpened()){
		printf("读取错误!\n");
		return -1;
	}
	// Method 1: index the FileStorage and cast the FileNode.
	int frameCount = (int)fs2["frameCount"];
	std::string date;
	// Method 2: use the FileNode >> operator.
	fs2["calibrationDate"] >> date;
	Mat cameraMatrix2, disCoeffs2;
	fs2["cameraMatrix"] >> cameraMatrix2;
	// bug fix: the writer stores this matrix under "distCoeffs";
	// querying the non-existent key "disCoeffs2" left the Mat empty.
	fs2["distCoeffs"] >> disCoeffs2;
	cout << "frameCount:" << frameCount << endl
		<< "calibration date" << date << endl
		<< "camera matrix" << cameraMatrix2 << endl
		<< "distortion coeffs" << disCoeffs2 << endl;
	// Iterate the "features" sequence of {x, y, lbp} mappings.
	FileNode features = fs2["features"];
	FileNodeIterator it = features.begin(), it_end = features.end();
	int idx = 0;
	std::vector<uchar> lbpval;
	for (; it != it_end; ++it, idx++)
	{
		cout << "feature#" << idx << ":";
		cout << "x=" << (int)(*it)["x"] << ",y=" << (int)(*it)["y"] <<
			",lbp:(";
		// A FileNode array can also be streamed straight into a std::vector.
		(*it)["lbp"] >> lbpval;
		for (int i = 0; i < (int)lbpval.size(); i++)
			cout << " " << (int)lbpval[i];
		cout << ")" << endl;
	}
	fs2.release();
	printf("文件读取完毕");
	getchar();
	return 0;
}
程序执行结果如图: