在利用传感器获取房间地图信息后,由于多次测量存在误差,导致房间地图轮廓歪歪扭扭、影响观感,我这里采用水平方向和垂直方向分别执行邻近线段合并的方法对这一问题做了一些处理。以下图为例,黑色表示地图轮廓,比如在垂直方向执行邻近线段合并时,先获取图像3列数据作为一个计算单元,如下图的计算单元a、b、c,再将这些计算单元里邻近的线段合并成一条直线;至于计算单元c,由于其近似于曲线,不进行合并,避免矫枉过正。
完整代码如下所示:
#include <algorithm>
#include <cstdio>
#include <utility>
#include <vector>

#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

using namespace std;
using namespace cv;
/*
 * Sort arr[left..right] (inclusive) in ascending order, applying the same
 * permutation to arr_index so the two arrays stay paired element-for-element.
 *
 * Replaces the previous hand-rolled recursive quicksort, which degraded to
 * O(n^2) (and deep recursion) on already-sorted or duplicate-heavy input —
 * exactly the pattern segment offsets tend to have. std::stable_sort gives
 * guaranteed O(n log n) and keeps equal keys in their original order.
 *
 * arr:       values to sort (only the [left, right] subrange is touched)
 * left:      first index of the subrange
 * right:     last index of the subrange (inclusive)
 * arr_index: companion index array, permuted identically to arr
 */
template<typename T>
void quick_sort(std::vector<T>& arr, int left, int right, std::vector<int>& arr_index)
{
    if (left >= right)
        return;  // empty or single-element subrange is already sorted
    // Pair each value with its companion index so both are permuted together.
    std::vector<std::pair<T, int>> keyed;
    keyed.reserve(right - left + 1);
    for (int i = left; i <= right; i++)
        keyed.emplace_back(arr[i], arr_index[i]);
    std::stable_sort(keyed.begin(), keyed.end(),
                     [](const std::pair<T, int>& a, const std::pair<T, int>& b) {
                         return a.first < b.first;
                     });
    // Write the sorted values and their indices back into the subrange.
    for (int i = left; i <= right; i++)
    {
        arr[i] = keyed[i - left].first;
        arr_index[i] = keyed[i - left].second;
    }
}
//函数功能:根据给定索引进行排序
template<typename T>
void resort_arr(vector<T>& arr, vector<int> indexs)
{
int len = arr.size();
T* backup = new T[len];
for(int i=0; i<len; i++){
backup[i] = arr[i];
}
for(int i=0; i<len; i++){
arr[i] = backup[indexs[i]];
}
}
/*
 * Draw a line segment into a 2-D array (a binarized grayscale image).
 * arr:     image buffer, row-major
 * cols:    image width (row stride in pixels)
 * ltop:    top-left endpoint of the segment
 * rbottom: bottom-right endpoint of the segment
 * val:     pixel value used to draw the segment
 */
void set_array(uchar* arr, int cols, Point ltop, Point rbottom, int val)
{
    const bool axis_aligned = (ltop.x == rbottom.x) || (ltop.y == rbottom.y);
    if (axis_aligned)
    {
        // Horizontal or vertical segment: one of the two loops runs once.
        for (int row = ltop.y; row <= rbottom.y; row++)
            for (int col = ltop.x; col <= rbottom.x; col++)
                arr[row * cols + col] = val;
        return;
    }
    // Slanted segment: rasterize along both axes — stepping along only one
    // axis would leave gaps on near-horizontal / near-vertical lines.
    const float dy_per_dx = float(rbottom.y - ltop.y) / (rbottom.x - ltop.x);
    for (int col = ltop.x; col <= rbottom.x; col++)
    {
        const int row = int((col - ltop.x) * dy_per_dx + ltop.y);
        arr[row * cols + col] = val;
    }
    const float dx_per_dy = float(rbottom.x - ltop.x) / (rbottom.y - ltop.y);
    for (int row = ltop.y; row <= rbottom.y; row++)
    {
        const int col = int((row - ltop.y) * dx_per_dy + ltop.x);
        arr[row * cols + col] = val;
    }
}
/*
 * Merge nearby straight line segments, first column-wise (vertical pass)
 * then row-wise (horizontal pass). Slanted lines are deliberately left
 * untouched so curved contours are not flattened.
 *
 * image_data:       binary grayscale buffer (pixel values 0 / 255), modified in place
 * img_w:            image width in pixels
 * img_h:            image height in pixels
 * merge_step:       every merge_step adjacent columns/rows form one merge unit
 * merge_max_gap:    two segments merge only if the gap between them is
 *                   smaller than merge_max_gap
 * merge_len_thresh: the combined length of the merged segments must exceed
 *                   merge_len_thresh (prevents nearby slanted lines from
 *                   being merged by mistake)
 */
void merge_lines(uchar* image_data,
                 int img_w,
                 int img_h,
                 int merge_step = 3,
                 int merge_max_gap = 3,
                 int merge_len_thresh = 30)
{
    // ---------------- Vertical pass: merge column segments ----------------
    for (int n = 0; n < img_w - merge_step; n++)
    {
        int line_num = 0;                // number of segments found in this unit
        vector<vector<int>> vec_lines;   // per-column list of [head, tail) offsets
        for (int x = n; x < n + merge_step; x++)
        {
            vector<int> vec_line;
            int last_val = 0;
            int head_off = 0;
            int tail_off = 0;
            for (int y = 0; y < img_h; y++)
            {
                int gray_scale = *(image_data + y * img_w + x);
                if (gray_scale == 255 && last_val == 0) {
                    head_off = y;        // rising edge: a segment starts here
                }
                if (gray_scale == 0 && last_val == 255) {
                    tail_off = y;        // falling edge: segment ends (exclusive)
                    // Ignore 1-pixel segments so right angles don't break apart.
                    if (tail_off - head_off == 1)
                    {
                        last_val = 0;
                        continue;
                    }
                    vec_line.push_back(head_off);
                    vec_line.push_back(tail_off);
                    line_num++;
                }
                last_val = gray_scale;
            }
            vec_lines.push_back(vec_line);
        }
        // Merging only makes sense with at least two segments.
        if (line_num >= 2)
        {
            vector<int> heads;   // head offsets of all segments in the unit
            vector<int> tails;   // tail offsets of all segments in the unit
            vector<int> indexs;
            for (size_t i = 0; i < vec_lines.size(); i++)
            {
                for (size_t j = 0; j < vec_lines[i].size(); j += 2)
                {
                    heads.push_back(vec_lines[i][j]);
                    tails.push_back(vec_lines[i][j + 1]);
                    indexs.push_back((int)indexs.size());
                }
            }
            // Sort head offsets ascending; reorder tails with the same permutation.
            quick_sort(heads, 0, line_num - 1, indexs);
            resort_arr(tails, indexs);
            vector<int> merge_heads;  // head offsets of the merged segments
            vector<int> merge_tails;  // tail offsets of the merged segments
            int current_merge_head = heads[0];  // running merge head
            int current_merge_tail = tails[0];  // running merge tail
            for (int i = 1; i < line_num; i++)
            {
                int merge_len = current_merge_tail - current_merge_head;
                int line_len = tails[i] - heads[i];
                // Merge when the gap between the running segment and the next
                // one is below merge_max_gap AND their combined length exceeds
                // merge_len_thresh (stops nearby slanted lines from merging).
                if (current_merge_tail - heads[i] > -merge_max_gap
                    && merge_len + line_len > merge_len_thresh)
                {
                    current_merge_tail = max(current_merge_tail, tails[i]);
                }
                else
                {
                    if (merge_len > merge_len_thresh)
                    {
                        merge_heads.push_back(current_merge_head);
                        merge_tails.push_back(current_merge_tail);
                    }
                    current_merge_head = heads[i];
                    current_merge_tail = tails[i];
                }
            }
            // Flush the last running merge (may add a duplicate; harmless).
            if (current_merge_tail - current_merge_head > merge_len_thresh)
            {
                merge_heads.push_back(current_merge_head);
                merge_tails.push_back(current_merge_tail);
            }
            // Erase every sub-segment of each merged segment, then redraw it.
            for (size_t k = 0; k < merge_heads.size(); k++)
            {
                // BUGFIX: was `Point merge_head_pt(-1);` — cv::Point has no
                // single-argument constructor; initialize both coordinates.
                Point merge_head_pt(-1, -1);
                Point merge_tail_pt(-1, -1);
                for (size_t i = 0; i < vec_lines.size(); i++)
                {
                    for (size_t j = 0; j < vec_lines[i].size(); j += 2)
                    {
                        if (vec_lines[i][j] >= merge_heads[k] && vec_lines[i][j + 1] <= merge_tails[k])
                        {
                            // Record the merged segment's head endpoint.
                            if (vec_lines[i][j] == merge_heads[k] && merge_head_pt.x == -1)
                            {
                                merge_head_pt.x = n + (int)i;
                                merge_head_pt.y = merge_heads[k];
                            }
                            // Record the merged segment's tail endpoint.
                            if (vec_lines[i][j + 1] == merge_tails[k] && merge_tail_pt.x == -1)
                            {
                                merge_tail_pt.x = n + (int)i;
                                merge_tail_pt.y = merge_tails[k] - 1;
                            }
                            // Clear the sub-segment's pixels.
                            set_array(image_data, img_w, Point(n + (int)i, vec_lines[i][j]), Point(n + (int)i, vec_lines[i][j + 1] - 1), 0);
                        }
                    }
                }
                // Draw the merged segment. Guard against sentinel endpoints so
                // set_array can never write out of bounds at (-1,-1).
                if (merge_head_pt.x != -1 && merge_tail_pt.x != -1)
                    set_array(image_data, img_w, merge_head_pt, merge_tail_pt, 255);
            }
        }
    }
    // -------- Horizontal pass: identical procedure applied to rows --------
    for (int n = 0; n < img_h - merge_step; n++)
    {
        int line_num = 0;
        vector<vector<int>> vec_lines;   // per-row list of [head, tail) offsets
        for (int y = n; y < n + merge_step; y++)
        {
            vector<int> vec_line;
            int last_val = 0;
            int head_off = 0;
            int tail_off = 0;
            for (int x = 0; x < img_w; x++)
            {
                int gray_scale = *(image_data + y * img_w + x);
                if (gray_scale == 255 && last_val == 0) {
                    head_off = x;
                }
                if (gray_scale == 0 && last_val == 255) {
                    tail_off = x;
                    // Ignore 1-pixel segments so right angles don't break apart.
                    if (tail_off - head_off == 1)
                    {
                        last_val = 0;
                        continue;
                    }
                    vec_line.push_back(head_off);
                    vec_line.push_back(tail_off);
                    line_num++;
                }
                last_val = gray_scale;
            }
            vec_lines.push_back(vec_line);
        }
        if (line_num >= 2)
        {
            vector<int> heads;
            vector<int> tails;
            vector<int> indexs;
            for (size_t i = 0; i < vec_lines.size(); i++)
            {
                for (size_t j = 0; j < vec_lines[i].size(); j += 2)
                {
                    heads.push_back(vec_lines[i][j]);
                    tails.push_back(vec_lines[i][j + 1]);
                    indexs.push_back((int)indexs.size());
                }
            }
            quick_sort(heads, 0, line_num - 1, indexs);
            resort_arr(tails, indexs);
            vector<int> merge_heads;
            vector<int> merge_tails;
            int current_merge_head = heads[0];
            int current_merge_tail = tails[0];
            for (int i = 1; i < line_num; i++)
            {
                int merge_len = current_merge_tail - current_merge_head;
                int line_len = tails[i] - heads[i];
                if (current_merge_tail - heads[i] > -merge_max_gap
                    && merge_len + line_len > merge_len_thresh)
                {
                    current_merge_tail = max(current_merge_tail, tails[i]);
                }
                else
                {
                    if (merge_len > merge_len_thresh)
                    {
                        merge_heads.push_back(current_merge_head);
                        merge_tails.push_back(current_merge_tail);
                    }
                    current_merge_head = heads[i];
                    current_merge_tail = tails[i];
                }
            }
            if (current_merge_tail - current_merge_head > merge_len_thresh)
            {
                merge_heads.push_back(current_merge_head);
                merge_tails.push_back(current_merge_tail);
            }
            for (size_t k = 0; k < merge_heads.size(); k++)
            {
                Point merge_head_pt(-1, -1);
                Point merge_tail_pt(-1, -1);
                for (size_t i = 0; i < vec_lines.size(); i++)
                {
                    for (size_t j = 0; j < vec_lines[i].size(); j += 2)
                    {
                        if (vec_lines[i][j] >= merge_heads[k] && vec_lines[i][j + 1] <= merge_tails[k])
                        {
                            if (vec_lines[i][j] == merge_heads[k] && merge_head_pt.x == -1)
                            {
                                merge_head_pt.x = merge_heads[k];
                                merge_head_pt.y = n + (int)i;
                            }
                            if (vec_lines[i][j + 1] == merge_tails[k] && merge_tail_pt.x == -1)
                            {
                                merge_tail_pt.x = merge_tails[k] - 1;
                                merge_tail_pt.y = n + (int)i;
                            }
                            set_array(image_data, img_w, Point(vec_lines[i][j], n + (int)i), Point(vec_lines[i][j + 1] - 1, n + (int)i), 0);
                        }
                    }
                }
                // Same sentinel guard as in the vertical pass.
                if (merge_head_pt.x != -1 && merge_tail_pt.x != -1)
                    set_array(image_data, img_w, merge_head_pt, merge_tail_pt, 255);
            }
        }
    }
}
int main()
{
    // Load the room-map image as single-channel grayscale.
    Mat src_img = imread("../mergeLines/test.png", IMREAD_GRAYSCALE);
    // BUGFIX: imread returns an empty Mat when the file is missing or
    // unreadable; the old code would then dereference a null data pointer.
    if (src_img.empty())
    {
        printf("failed to load ../mergeLines/test.png\n");
        return -1;
    }
    imshow("合并线段前", src_img);
    // Merge nearby straight segments in place (3-column/row units,
    // max gap 3 px, combined-length threshold 30 px).
    merge_lines(src_img.data, src_img.cols, src_img.rows, 3, 3, 30);
    imshow("合并线段后", src_img);
    waitKey();
    return 0;
}
最后效果如下: