Building a Game Engine from Scratch: Implementing the Texture Manager. Binding texture data to OpenGL and choosing linear filtering

         This chapter covers how to load an image into memory, convert it from BGR to RGB, and then bind it to OpenGL and render it.

        The explanation in this chapter runs in reverse order. Readers with a solid background can also read it backwards: look at the key points first, then come back to the source code. Readers new to this can skim the source code once and then read the analysis of the key points; the analysis will make more sense that way. The article is almost done; I will tidy up the source code and post it to the group.
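Before the full listing, here is a minimal usage sketch of the manager, just to show where the pieces fit. It assumes an OpenGL context already exists and that Engine.h has been included; the asset path is a made-up example, not a file from the project.

// Minimal usage sketch (hypothetical asset path), assuming a valid GL context.
CCTexture *tex = CTextureManager::getInstance()->addTexture("res/player.png");

// Somewhere in the per-frame render function:
tex->setCenterPos({ 0, 0, 1 });
tex->draw(); // draws an alpha-blended textured quad centered on centerPos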

The complete code first

 TextureManager.h

#pragma once
#include <list>

struct  textureHeight
{
public:
	GLfloat m_iLowHeight=0;//lowest height
	GLfloat	m_iHighHeight=0;//highest height
	GLfloat m_iOptimalHeight=0;//optimal height
};

class CCTexture
{
	friend class CTextureManager;
private:
	char name[255]; //texture path
	GLuint type;	//pixel format (an OpenGL enum such as GL_RGB)
	GLuint texID;	//texture ID
	UINT  width;     //image width
	UINT height;	//image height
	UINT uReference; //reference count
	int bytesPerPixel; //bytes per pixel
	GLubyte *imageData;//texture data (unsigned char)
	Vector3D centerPos ={0,0,1};

public:
	textureHeight *Tex=new textureHeight;
	CCTexture();
	CCTexture(const char *fileName,float width=1,float height=1);
	CCTexture(const char * name, int wid, int hei, int BitCount);
	int getbyte() { return bytesPerPixel; }
	GLuint getType() { return type; }
	char *getName() { return name; }
	GLuint getTextureID() { return texID; }
	UINT getWidth() { return width; }
	UINT getHeight() { return height; }
	BOOL saveBmp(const char * fileName);
	GLubyte *getImageData() { return imageData; }
	void setImageData(GLubyte *data) { this->imageData = data; }
	void setCenterPos(Vector3D centerPoint);
	void draw();

	~CCTexture() ;

public:
	
	
};

class CTextureManager
{
private:
	std::list<CCTexture*> textureList;
	static CTextureManager*instance;

public:
	static CTextureManager*getInstance();
	CCTexture* addTexture(char *fileName,float width=1,float height=1);
	CCTexture *addTexture(const char*fileName);
	CCTexture * addTexture(char *name, int width, int height, int pixelFormat);
	void resetRef();//reset every texture's reference count to zero
	void releaseRef();//delete the textures whose reference count is zero
	void CreateGLTextures(int wid, int hei, int biBitCount, OUT GLuint &texture, OUT GLubyte*&textureData);

	CTextureManager();
	~CTextureManager();
};

TextureManager.cpp

#include "Engine.h"

CTextureManager*CTextureManager::instance = NULL;


CCTexture::CCTexture()
{
	texID = 0;
	uReference = 1;
	imageData = NULL;
}


CCTexture::CCTexture(const char* fileName,float wid,float hei)
{
	texID = 0;
	uReference = 1;
	CImage img;
	HRESULT hr = img.Load(CUser::getInstance()->CharToWchar(fileName));
	if (FAILED(hr))
	{
		char temp[256];
		sprintf_s(temp, 256, "Failed to open %s", fileName);
		MessageBox(NULL, CUser::getInstance()->CharToWchar(temp), L"Error", 0);
		return;
	}

	strcpy(name, fileName);
	HBITMAP hbmp = img;
	BITMAP bm;
	GetObject(hbmp, sizeof(bm), &bm);

	if (wid > 1 && hei > 1)
	{
		width = wid;	height = hei;
	}
	else 
	{
		width = bm.bmWidth;	height = bm.bmHeight;
	}
	
	bytesPerPixel = bm.bmBitsPixel / 8;

	if (bytesPerPixel == 3)
	{
		type = GL_RGB;
	}
	else if (bytesPerPixel == 4)
	{
		type = GL_RGBA;
	}
	else if (bytesPerPixel == 1)
	{
		type = GL_LUMINANCE;
	}
		
	imageData = new GLubyte[width*height*bytesPerPixel];
	memcpy(imageData, bm.bmBits, width*height*bytesPerPixel);
	// Convert from BGR (what GDI gives us) to the RGB order OpenGL expects: swap channels 0 and 2
	if (bytesPerPixel != 1)
	{
		for (long i = 0; i < width * height; i++)
		{
			GLubyte temp = imageData[0 + i*bytesPerPixel];
			imageData[0 + i*bytesPerPixel] = imageData[2 + i*bytesPerPixel];
			imageData[2 + i*bytesPerPixel] = temp;
		}
	}
	else // 8-bit (grayscale): copy row by row through GetPixelAddress, since DIB rows may be padded
	{
		for (int i = 0; i < height; i++)
		{
			memcpy(imageData + i*width, img.GetPixelAddress(0, i), width);
		}
	}
	glGenTextures(1, (GLuint*)&texID);
	glBindTexture(GL_TEXTURE_2D, texID);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);//minification filter: trilinear via mipmaps
	gluBuild2DMipmaps(GL_TEXTURE_2D, type, width, height, type, GL_UNSIGNED_BYTE, imageData);
	
	
	/*glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
		glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_REPEAT);
		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T, GL_REPEAT);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST);
		glTexImage2D(GL_TEXTURE_2D, 0, type, width, height, 0, type, GL_UNSIGNED_BYTE, imageData);*/
}
	

CCTexture::CCTexture(const char *name, int wid, int hei, int BitCount)
{
	uReference =1;
	strcpy(this->name,name);
	bytesPerPixel = BitCount / 8;
	this->width = wid;
	this->height = hei;
	glGenTextures(1, &texID);
	glBindTexture(GL_TEXTURE_2D, texID);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	imageData = new GLubyte[width*height*bytesPerPixel];
	//memset(imageData, 0, width*height*bytesPerPixel);


	if (bytesPerPixel == 1)
	{
		for (int i = 0; i < width*height; ++i)
		{
			imageData[i] = 0;
		}
		type = GL_LUMINANCE;
	}
	else if (bytesPerPixel == 3)
	{
		for (int i = 0; i < width*height; ++i)
		{
			imageData[i * 3 + 0] = 0;
			imageData[i * 3 + 1] = 0;
			imageData[i * 3 + 2] = 0;
		}
		type = GL_RGB;
	}
	else if (bytesPerPixel == 4)
	{
		for (int i = 0; i < width*height; ++i)
		{
			imageData[i * 4 + 0] = 0;
			imageData[i * 4 + 1] = 0;
			imageData[i * 4 + 2] = 0;
			imageData[i * 4 + 3] = 0;
		}
		type = GL_RGBA;
	}

	glTexImage2D(GL_TEXTURE_2D, 0, type, wid, hei, 0, type, GL_UNSIGNED_BYTE, imageData);

}

BOOL CCTexture::saveBmp(const char *fileName)
{
	//if the pixel data pointer is null there is nothing to save, so return
	if (!imageData)
		return 0;

	//grayscale palette: 8-bit BMPs need a 256-entry color table
	static RGBQUAD *pColorTable = NULL;
	if (pColorTable == NULL)
	{
		pColorTable = new RGBQUAD[256];
		for (int i = 0; i < 256; i++)
		{
			pColorTable[i].rgbBlue = i;
			pColorTable[i].rgbGreen = i;
			pColorTable[i].rgbRed = i;
			pColorTable[i].rgbReserved = 0;
		}
	}

	//color table size in bytes: 1024 for grayscale images, 0 for color images
	int colorTablesize = 0;
	if (bytesPerPixel == 1)
		colorTablesize = 1024;
	
	//each stored row must be padded to a multiple of 4 bytes
	//(the writes below assume width * bytesPerPixel is already a multiple of 4)
	int lineByte = (width * bytesPerPixel + 3) / 4 * 4;

	//open the file for binary writing
	FILE *fp = fopen(fileName, "wb");
	if (fp == 0) return 0;

	//fill in the bitmap file header
	BITMAPFILEHEADER fileHead;
	fileHead.bfType = 0x4D42;//"BM"

	//bfSize is the sum of the four parts of the file
	fileHead.bfSize = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER)
		+ colorTablesize + lineByte*height;
	fileHead.bfReserved1 = 0;
	fileHead.bfReserved2 = 0;

	//bfOffBits is the combined size of the first three parts (headers + color table)
	fileHead.bfOffBits = 54 + colorTablesize;

	//write the file header
	fwrite(&fileHead, sizeof(BITMAPFILEHEADER), 1, fp);

	//fill in the bitmap info header
	BITMAPINFOHEADER head;
	head.biBitCount = bytesPerPixel * 8;
	head.biClrImportant = 0;
	head.biClrUsed = 0;
	head.biCompression = 0;
	head.biHeight = height;
	head.biPlanes = 1;
	head.biSize = 40;
	head.biSizeImage = lineByte*height;
	head.biWidth = width;
	head.biXPelsPerMeter = 0;
	head.biYPelsPerMeter = 0;
	//write the info header to the file
	fwrite(&head, sizeof(BITMAPINFOHEADER), 1, fp);

	//grayscale images carry a color table, so write it out
	if (bytesPerPixel == 1)
		fwrite(pColorTable, sizeof(RGBQUAD), 256, fp);

	//write the pixel data
	if (bytesPerPixel == 1)
	{
		fwrite(imageData, height*lineByte, 1, fp);
	}
	else
	{
		//imageData is kept in RGB order for OpenGL; BMP wants BGR, so swap back into a copy
		GLubyte *newData = new GLubyte[width * height*bytesPerPixel];
		memcpy(newData, imageData, width * height*bytesPerPixel);

		for (long i = 0; i < width * height; i++)
		{
			GLubyte temp = newData[0 + i*bytesPerPixel];
			newData[0 + i*bytesPerPixel] = newData[2 + i*bytesPerPixel];
			newData[2 + i*bytesPerPixel] = temp;
		}
		fwrite(newData, height*lineByte, 1, fp);
		delete[] newData;
	}

	//close the file
	fclose(fp);
	return 1;
}

void CCTexture::setCenterPos(Vector3D center)
{
	this->centerPos = center;
}


void CCTexture::draw()
{
	float halfWid = width / 2.0f;
	float halfHei = height / 2.0f;
	Vector3D temp = centerPos;
	glEnable(GL_TEXTURE_2D);
	glEnable(GL_BLEND);
	glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
	glBindTexture(GL_TEXTURE_2D, texID);
	glBegin(GL_QUADS);
	glTexCoord2f(0, 0);	glVertex3f(centerPos.x- halfWid, centerPos.y+halfHei, 1);
	glTexCoord2f(1, 0);	glVertex3f(centerPos.x+ halfWid, centerPos.y+halfHei, 1);
	glTexCoord2f(1, 1); glVertex3f(centerPos.x + halfWid, centerPos.y-halfHei, 1);
	glTexCoord2f(0, 1); glVertex3f(centerPos.x -halfWid, centerPos.y-halfHei, 1);
	glEnd();
	glDisable(GL_TEXTURE_2D);
	glDisable(GL_BLEND);
}


CCTexture::~CCTexture()
{
	glDeleteTextures(1, &texID);
	delete[] imageData; // allocated with new[]
	delete Tex;         // allocated with new in the class definition
}


CTextureManager::CTextureManager()
{
}


CTextureManager*CTextureManager::getInstance()
{
	if (instance == NULL)
	{
		instance = new CTextureManager;
	}
	return instance;
}


CCTexture* CTextureManager::addTexture(char *filename,float width, float height)
{
	list<CCTexture*>::iterator iter;
	for (iter = textureList.begin(); iter != textureList.end(); iter++)
	{
		if (strcmp((*iter)->getName(), filename) == 0)
		{
			(*iter)->uReference++;
			return *iter;
		}
	}
	textureList.push_back(new CCTexture(filename, width, height));
	return textureList.back();
}


CCTexture* CTextureManager::addTexture(const char *filename)
{
	// Resolve the real file name: keep .bmp/.jpg, otherwise swap the extension for .png
	const char *lp = strrchr(filename, '.');
	char temp[255];
	if (lp != NULL && (strcmp(lp, ".bmp") == 0 || strcmp(lp, ".jpg") == 0))
	{
		strcpy(temp, filename);
	}
	else if (lp != NULL)
	{
		memset(temp, 0, 255);
		strncpy(temp, filename, lp - filename);
		strcat(temp, ".png");
	}
	else // no extension at all: use the name as-is
	{
		strcpy(temp, filename);
	}



	list<CCTexture*>::iterator iter;
	for (iter = textureList.begin(); iter != textureList.end(); iter++)
	{
		if (strcmp((*iter)->getName(), temp) == 0) // compare against the resolved name, which is what CCTexture stores
		{
			(*iter)->uReference++;
			return *iter;
		}
	}
	textureList.push_back(new CCTexture(temp));
	return textureList.back();

}


CCTexture* CTextureManager::addTexture(char *name, int width, int height, int BitCount)
{
	list<CCTexture*>::iterator iter;
	for (iter = textureList.begin(); iter != textureList.end(); iter++)
	{
		if (strcmp((*iter)->getName(), name) == 0)
		{
			(*iter)->uReference++;
			return *iter;
		}
	}
	textureList.push_back(new CCTexture(name, width, height, BitCount));
	return textureList.back();
}


void CTextureManager::releaseRef()
{
	for (auto iter = textureList.begin(); iter != textureList.end();)
	{
		if ((*iter)->uReference == 0)
		{
			delete(*iter);
			textureList.erase(iter++);
		}
		else
			iter++;
	}

}



void CTextureManager::resetRef()
{
	for (auto iter = textureList.begin(); iter != textureList.end(); iter++)
	{
		(*iter)->uReference = 0;
	}
}




void CTextureManager::CreateGLTextures(int wid, int hei, int biBitCount, OUT GLuint &texture, OUT GLubyte*&textureData)
{
	glGenTextures(1, &texture);
	glBindTexture(GL_TEXTURE_2D, texture);
	if (biBitCount == 8)
	{
		textureData = new GLubyte[wid*hei];
		memset(textureData, 0, wid*hei);
		glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);		// linear filtering
		glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		glTexImage2D(GL_TEXTURE_2D, 0, 1, wid, hei, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, textureData); // legacy internal format: 1 = one component
	}
	else if (biBitCount == 24)
	{
		textureData = new GLubyte[wid*hei * 3];
		memset(textureData, 0, wid*hei * 3);

		glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);		// linear filtering
		glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		glTexImage2D(GL_TEXTURE_2D, 0, 3, wid, hei, 0, GL_RGB, GL_UNSIGNED_BYTE, textureData); // legacy internal format: 3 = three components
	}

}

CTextureManager::~CTextureManager()
{
	for (auto iter = textureList.begin();iter != textureList.end();)
	{
		delete (*iter);
		textureList.erase(iter++);
	}
	textureList.clear();
}
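The two reference-count helpers are never called in this file, so here is one plausible way to use them when switching scenes; this is a sketch under my own assumptions (the scene name and texture paths are placeholders), not something the engine dictates. resetRef() zeroes every count, the new scene re-requests the textures it needs (which bumps their counts back up), and releaseRef() then frees whatever is still unused.

// Hypothetical scene-switch sketch; the paths are made up.
void switchScene()
{
	CTextureManager *mgr = CTextureManager::getInstance();
	mgr->resetRef();                      // mark every texture as unused
	// The new scene requests its textures; addTexture() bumps the count
	// back up for anything it still needs.
	mgr->addTexture("res/terrain.bmp");
	mgr->addTexture("res/player.png");
	mgr->releaseRef();                    // delete textures whose count stayed at 0
}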

Saving an image as a BMP (BMP is the only supported format)

At the time, this function existed mainly so the terrain painted in the editor could be exported for later use.
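A call site would look roughly like this; the texture name, size, and output path are made up for illustration.

// Hypothetical: export the 8-bit heightmap the editor has been painting into.
char name[] = "heightmap";
CCTexture *heightMap = CTextureManager::getInstance()->addTexture(name, 256, 256, 8);
// ... paint into heightMap->getImageData() and re-upload ...
heightMap->saveBmp("terrain_height.bmp"); // writes a grayscale BMP with a 256-entry palette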

BOOL CCTexture::saveBmp(const char *fileName)
{
	//if the pixel data pointer is null there is nothing to save, so return
	if (!imageData)
		return 0;

	//grayscale palette: 8-bit BMPs need a 256-entry color table
	static RGBQUAD *pColorTable = NULL;
	if (pColorTable == NULL)
	{
		pColorTable = new RGBQUAD[256];
		for (int i = 0; i < 256; i++)
		{
			pColorTable[i].rgbBlue = i;
			pColorTable[i].rgbGreen = i;
			pColorTable[i].rgbRed = i;
			pColorTable[i].rgbReserved = 0;
		}
	}

	//color table size in bytes: 1024 for grayscale images, 0 for color images
	int colorTablesize = 0;
	if (bytesPerPixel == 1)
		colorTablesize = 1024;
	
	//each stored row must be padded to a multiple of 4 bytes
	//(the writes below assume width * bytesPerPixel is already a multiple of 4)
	int lineByte = (width * bytesPerPixel + 3) / 4 * 4;

	//open the file for binary writing
	FILE *fp = fopen(fileName, "wb");
	if (fp == 0) return 0;

	//fill in the bitmap file header
	BITMAPFILEHEADER fileHead;
	fileHead.bfType = 0x4D42;//"BM"

	//bfSize is the sum of the four parts of the file
	fileHead.bfSize = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER)
		+ colorTablesize + lineByte*height;
	fileHead.bfReserved1 = 0;
	fileHead.bfReserved2 = 0;

	//bfOffBits is the combined size of the first three parts (headers + color table)
	fileHead.bfOffBits = 54 + colorTablesize;

	//write the file header
	fwrite(&fileHead, sizeof(BITMAPFILEHEADER), 1, fp);

	//fill in the bitmap info header
	BITMAPINFOHEADER head;
	head.biBitCount = bytesPerPixel * 8;
	head.biClrImportant = 0;
	head.biClrUsed = 0;
	head.biCompression = 0;
	head.biHeight = height;
	head.biPlanes = 1;
	head.biSize = 40;
	head.biSizeImage = lineByte*height;
	head.biWidth = width;
	head.biXPelsPerMeter = 0;
	head.biYPelsPerMeter = 0;
	//write the info header to the file
	fwrite(&head, sizeof(BITMAPINFOHEADER), 1, fp);

	//grayscale images carry a color table, so write it out
	if (bytesPerPixel == 1)
		fwrite(pColorTable, sizeof(RGBQUAD), 256, fp);

	//write the pixel data
	if (bytesPerPixel == 1)
	{
		fwrite(imageData, height*lineByte, 1, fp);
	}
	else
	{
		//imageData is kept in RGB order for OpenGL; BMP wants BGR, so swap back into a copy
		GLubyte *newData = new GLubyte[width * height*bytesPerPixel];
		memcpy(newData, imageData, width * height*bytesPerPixel);

		for (long i = 0; i < width * height; i++)
		{
			GLubyte temp = newData[0 + i*bytesPerPixel];
			newData[0 + i*bytesPerPixel] = newData[2 + i*bytesPerPixel];
			newData[2 + i*bytesPerPixel] = temp;
		}
		fwrite(newData, height*lineByte, 1, fp);
		delete[] newData;
	}

	//close the file
	fclose(fp);
	return 1;
}
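One caveat worth stating: the function above writes height*lineByte bytes straight out of the pixel buffer, which is only safe when width*bytesPerPixel is already a multiple of 4 (true for the power-of-two terrain textures I save with it). For arbitrary widths the rows would have to be written one at a time with explicit padding, roughly like the sketch below; writeRowPadded is just an illustrative helper, not part of the engine.

// Sketch: write one padded BMP row at a time so row sizes that are not a
// multiple of 4 bytes still produce a valid file.
static void writeRowPadded(FILE *fp, const GLubyte *row, int rowBytes, int lineByte)
{
	static const GLubyte pad[3] = { 0, 0, 0 };
	fwrite(row, rowBytes, 1, fp);
	if (lineByte > rowBytes)
		fwrite(pad, lineByte - rowBytes, 1, fp); // at most 3 padding bytes
}

// Usage inside saveBmp, after the headers have been written:
// for (int y = 0; y < height; ++y)
//     writeRowPadded(fp, newData + y * width * bytesPerPixel,
//                    width * bytesPerPixel, lineByte);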

Loading the file & binding the texture, choosing a filtering mode

Because the image is loaded with the Windows imaging API (ATL's CImage), the pixel data arrives in BGR order and has to be converted to the RGB order OpenGL expects. For the filtering mode I chose GL_LINEAR, also called (bi)linear filtering: it takes the texels nearest the texture coordinate and interpolates between them to approximate the colors that lie between texels. A small CPU-side sketch of that interpolation follows below.
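To make the idea concrete, here is a small CPU-side sketch of bilinear sampling on a tightly packed RGB buffer. It only illustrates the weighting; the real GL_LINEAR sampler works on texel centers and handles wrapping according to the wrap mode, and this helper is not part of the engine code.

// Illustration only: bilinear sample of one channel from a tightly packed RGB image.
// u and v are in [0,1]; width and height are assumed to be greater than 1.
unsigned char sampleBilinearChannel(const unsigned char *image, int width, int height,
                                    float u, float v, int channel)
{
	float x = u * (width - 1);
	float y = v * (height - 1);
	int x0 = (int)x, y0 = (int)y;
	int x1 = (x0 + 1 < width)  ? x0 + 1 : x0;
	int y1 = (y0 + 1 < height) ? y0 + 1 : y0;
	float fx = x - x0, fy = y - y0;

	// Fetch the four surrounding texels and blend them by distance.
	float c00 = image[(y0 * width + x0) * 3 + channel];
	float c10 = image[(y0 * width + x1) * 3 + channel];
	float c01 = image[(y1 * width + x0) * 3 + channel];
	float c11 = image[(y1 * width + x1) * 3 + channel];
	float top    = c00 * (1 - fx) + c10 * fx;
	float bottom = c01 * (1 - fx) + c11 * fx;
	return (unsigned char)(top * (1 - fy) + bottom * fy);
}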

Pros and cons of linear filtering with mipmaps
  Pros:
        1. Quality: at a distance it avoids the aliasing and moiré patterns caused by the sampling rate falling far below the texel frequency, so the result is much better than rendering without mipmaps.

        2. Performance: without mipmaps, distant surfaces make neighbouring pixels sample texels whose UVs are far apart, which hurts the texture-cache hit rate; sampling the smaller mip levels avoids that slowdown.

  Cons:
         1. Extra video memory, which can be mitigated with something like UE's texture streaming pool (trading IO for VRAM).

CCTexture::CCTexture(const char* fileName,float wid,float hei)
{
	texID = 0;
	uReference = 1;
	CImage img;
	HRESULT hr = img.Load(CUser::getInstance()->CharToWchar(fileName));
	if (FAILED(hr))
	{
		char temp[256];
		sprintf_s(temp, 256, "Failed to open %s", fileName);
		MessageBox(NULL, CUser::getInstance()->CharToWchar(temp), L"Error", 0);
		return;
	}

	strcpy(name, fileName);
	HBITMAP hbmp = img;
	BITMAP bm;
	GetObject(hbmp, sizeof(bm), &bm);

	if (wid > 1 && hei > 1)
	{
		width = wid;	height = hei;
	}
	else 
	{
		width = bm.bmWidth;	height = bm.bmHeight;
	}
	
	bytesPerPixel = bm.bmBitsPixel / 8;

	if (bytesPerPixel == 3)
	{
		type = GL_RGB;
	}
	else if (bytesPerPixel == 4)
	{
		type = GL_RGBA;
	}
	else if (bytesPerPixel == 1)
	{
		type = GL_LUMINANCE;
	}
		
	imageData = new GLubyte[width*height*bytesPerPixel];
	memcpy(imageData, bm.bmBits, width*height*bytesPerPixel);
	// Convert from BGR (what GDI gives us) to the RGB order OpenGL expects: swap channels 0 and 2
	if (bytesPerPixel != 1)
	{
		for (long i = 0; i < width * height; i++)
		{
			GLubyte temp = imageData[0 + i*bytesPerPixel];
			imageData[0 + i*bytesPerPixel] = imageData[2 + i*bytesPerPixel];
			imageData[2 + i*bytesPerPixel] = temp;
		}
	}
	else // 8-bit (grayscale): copy row by row through GetPixelAddress, since DIB rows may be padded
	{
		for (int i = 0; i < height; i++)
		{
			memcpy(imageData + i*width, img.GetPixelAddress(0, i), width);
		}
	}
	glGenTextures(1, (GLuint*)&texID);
	glBindTexture(GL_TEXTURE_2D, texID);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);//minification filter: trilinear via mipmaps
	gluBuild2DMipmaps(GL_TEXTURE_2D, type, width, height, type, GL_UNSIGNED_BYTE, imageData);
}
