/***********************************************************************************************************
// 寻找最大熵阈值并分割
***********************************************************************************************************/
// Find the maximum-entropy threshold of a grayscale image and binarize with it.
// Assumes a single-channel 8-bit input (histogram is taken over channel 0,
// 256 bins) — confirm at call sites.
Mat maxEntropySegMentation(Mat inputImage)
{
    // Build a 256-bin grayscale histogram over bin range [0, 256).
    const int channels[1] = { 0 };
    const int histSize[1] = { 256 };
    float pranges[2] = { 0, 256 };
    const float* ranges[1] = { pranges };
    MatND hist;
    calcHist(&inputImage, 1, channels, Mat(), hist, 1, histSize, ranges);

    // Scan every candidate threshold and keep the one maximizing the
    // combined background + target entropy.
    float bestEntropy = 0;
    int bestThreshold = 0;
    for (int t = 0; t < 256; ++t)
    {
        const float entropy = caculateCurrentEntropy(hist, t);
        if (entropy > bestEntropy)
        {
            bestEntropy = entropy;
            bestThreshold = t;
        }
    }

    // Binarize at the selected threshold (foreground -> 255).
    Mat result;
    threshold(inputImage, result, bestThreshold, 255, CV_THRESH_BINARY);
    return result;
}
//采用kmeans方法
// Binarize a grayscale image by k-means clustering of its pixel intensities.
// Assumes a single-channel 8-bit input (uses img.ptr<uchar>) — confirm at
// call sites. Returns a CV_8UC1 image where the darkest cluster maps to 0
// and the brightest to 255.
Mat Kmeans_threshold(Mat image)
{
    Mat img = image.clone();
    // NOTE: the original also computed an Otsu threshold here, but the result
    // was never used (its imshow was commented out) — dead code removed.

    // Flatten the image into one 32-bit float sample per pixel; k-means
    // requires floating-point input, labels are 32-bit signed ints.
    Mat points(img.cols * img.rows, 1, CV_32FC1);
    Mat labels(img.cols * img.rows, 1, CV_32SC1);
    int k = 0;
    for (int i = 0; i < img.rows; i++)
    {
        const uchar* row = img.ptr<uchar>(i);
        for (int j = 0; j < img.cols; j++)
        {
            points.at<float>(k, 0) = float(row[j]);
            k++;
        }
    }

    // Two clusters: object vs. background.
    const int clusterCount = 2;
    Mat centers(clusterCount, 1, points.type());
    kmeans(points, clusterCount, labels,
        TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0),
        3, KMEANS_PP_CENTERS, centers);

    // k-means label numbering is arbitrary, so the original mapping
    // "255 - label * step" could randomly invert the output between runs.
    // Instead rank each cluster by its center intensity (rank = number of
    // centers darker than it) and spread the ranks evenly over [0, 255].
    const float step = 255.0f / (clusterCount - 1);
    uchar gray[clusterCount];
    for (int c = 0; c < clusterCount; c++)
    {
        int rank = 0;
        for (int d = 0; d < clusterCount; d++)
        {
            if (centers.at<float>(d, 0) < centers.at<float>(c, 0))
                rank++;
        }
        gray[c] = uchar(rank * step);
    }

    // Paint each pixel with the gray level of its cluster.
    Mat img1(img.rows, img.cols, CV_8UC1);
    k = 0;
    for (int i = 0; i < img1.rows; i++)
    {
        uchar* row = img1.ptr<uchar>(i);
        for (int j = 0; j < img1.cols; j++)
        {
            row[j] = gray[labels.at<int>(k, 0)];
            k++;
        }
    }
    return img1;
}
/***********************************************************************************************************
// 计算给定阈值下背景与目标的熵之和 (combined background/target entropy for a given threshold)
***********************************************************************************************************/
/***********************************************************************************************************
// Compute the combined Shannon entropy of the background (bins < threshold)
// and target (bins >= threshold) parts of a 256-bin float histogram.
// hist      : CV_32F histogram produced by calcHist (256 bins, read via ptr<float>)
// threshold : candidate split point in [0, 255]
// Returns background entropy + target entropy (natural log).
// (Name keeps the original spelling since callers depend on it.)
***********************************************************************************************************/
float caculateCurrentEntropy(Mat hist, int threshold)
{
    const float* pDataHist = hist.ptr<float>(0);

    // First pass: total histogram mass on each side of the threshold.
    float BackgroundSum = 0, targetSum = 0;
    for (int i = 0; i < 256; i++)
    {
        if (i < threshold)
            BackgroundSum += pDataHist[i];
        else
            targetSum += pDataHist[i];
    }
    // (Debug "cout << sums << endl" removed: it ran once per candidate
    // threshold — 256 times per segmentation — and flushed stdout each call.)

    // Second pass: accumulate -p*log(p) per side. Empty bins are skipped
    // (lim p->0 of p*log p is 0), which also avoids log(0); if a side's sum
    // is 0 every one of its bins is 0, so no division by zero can occur.
    float BackgroundEntropy = 0, targetEntropy = 0;
    for (int i = 0; i < 256; i++)
    {
        if (pDataHist[i] == 0)
            continue;
        if (i < threshold)
        {
            float ratio1 = pDataHist[i] / BackgroundSum;
            BackgroundEntropy += -ratio1 * logf(ratio1);
        }
        else
        {
            float ratio2 = pDataHist[i] / targetSum;
            targetEntropy += -ratio2 * logf(ratio2);
        }
    }
    return (targetEntropy + BackgroundEntropy);
}
/***************
*双峰法求阈值
*img : 图像
*th : 设置预估阈值
*itertime : 迭代次数
********************/
// Bimodal (two-peak) thresholding: iteratively locate the histogram peak on
// each side of the current threshold and move the threshold to their midpoint.
// img      : input image; assumes CV_8UC1 (read via at<uchar>) — confirm at call sites
// th       : initial threshold estimate
// itertime : maximum number of refinement iterations
// Returns the converged (or last) threshold.
int PeakSplit(Mat &img, int th, int itertime)
{
    // Gray-level histogram of the whole image.
    int hist[256] = {};
    for (int r = 0; r < img.rows; r++)
    {
        for (int c = 0; c < img.cols; c++)
        {
            hist[img.at<uchar>(r, c)]++;
        }
    }

    int leftPeak = 0, leftPeakVal = 0;     // peak position / height below threshold
    int rightPeak = 255, rightPeakVal = 0; // peak position / height above threshold
    int cur = th;
    while (itertime--)
    {
        // Strongest bin in [leftPeak, cur]; peak heights deliberately carry
        // over between rounds so a peak is only replaced by a taller one.
        for (int i = leftPeak; i <= cur; i++)
        {
            if (hist[i] > leftPeakVal)
            {
                leftPeakVal = hist[i];
                leftPeak = i;
            }
        }
        // Strongest bin in (cur, rightPeak], scanned downward.
        for (int i = rightPeak; i > cur; i--)
        {
            if (hist[i] > rightPeakVal)
            {
                rightPeakVal = hist[i];
                rightPeak = i;
            }
        }
        // New threshold is the midpoint of the two peaks; stop when stable.
        int mid = (leftPeak + rightPeak) / 2;
        if (cur == mid)
            break;
        cur = mid;
    }
    return cur;
}//blog.csdn.net/u012198575/article/details/100667117