using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using OpenCvSharp;
namespace OpenCVTest
{
public partial class Form1 : Form
{
/// <summary>Initializes the form and its designer-generated components.</summary>
public Form1()
{
InitializeComponent();
}
/// <summary>Runs the P3 segmentation demo pipeline as soon as the form loads.</summary>
private void Form1_Load(object sender, EventArgs e)
{
P3();
}
/// <summary>
/// Demo pipeline: Gaussian blur → k-means colour quantisation → Canny edges →
/// morphological close → threshold; the edge map is then flooded from the
/// borders (HandleImg), binarised into a mask (HandleMask) and used to
/// composite the source over a random background colour (HandleSrcToMask).
/// Unused locals (kernel, kernel_1, dst) and dead commented code removed.
/// </summary>
public void P3()
{
    // Cross-shaped 3x3 structuring element for the close operation below.
    Mat kernel_2 = Cv2.GetStructuringElement(MorphShapes.Cross, new Size(3, 3));
    // NOTE(review): hard-coded developer path; parameterise before reuse.
    Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\21.jpg", ImreadModes.Color);
    Mat source = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\21.jpg", ImreadModes.Color);
    Mat gaosi = new Mat();
    // Smooth before clustering so k-means produces larger uniform regions.
    Cv2.GaussianBlur(src, gaosi, new OpenCvSharp.Size(7, 7), 0);
    Mat xx = KMeansHelper.GetMat(gaosi, 100);
    Cv2.GaussianBlur(xx, xx, new OpenCvSharp.Size(3, 3), 0);
    Cv2.ImShow("src", src);
    Mat cannyedge = new Mat();
    Cv2.Canny(xx, cannyedge, 10, 80, 3);
    Cv2.ImShow("cannyedge", cannyedge);
    Mat mask = cannyedge;
    // Close small gaps in the edge map, then re-binarise twice around a blur
    // to thicken and clean the edges.
    Cv2.MorphologyEx(mask, mask, MorphTypes.Close, kernel_2, null, 3);
    Cv2.Threshold(mask, mask, 10, 255, ThresholdTypes.Binary);
    Cv2.Blur(mask, mask, new OpenCvSharp.Size(3, 3));
    Cv2.Threshold(mask, mask, 10, 255, ThresholdTypes.Binary);
    Cv2.ImShow("mask1", mask);
    // Mark the border-reachable region with the 128 "shadow" value…
    HandleImg(mask);
    Cv2.ImShow("Mask Last", mask);
    // …then binarise: shadow -> background (0), everything else -> 255.
    HandleMask(mask);
    Cv2.ImShow("Mask 3 Last", mask);
    Mat res = HandleSrcToMask(source, mask);
    Cv2.ImShow("res", res);
}
/// <summary>
/// Experimental segmentation pipeline on a fixed developer image: colour
/// convert, blur, k-means quantise, Canny, then morphological open/close on
/// the edge map. Alternative steps are kept commented out for reference.
/// </summary>
public void P2()
{
// NOTE(review): kernel contents are uninitialized memory — no values are ever
// written; a GetStructuringElement kernel may have been intended. Confirm.
Mat kernel = new Mat(3, 3, MatType.CV_8UC1);
Mat mask = new Mat();
Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a1.jpg", ImreadModes.Color);
Mat srcgary = new Mat();
Mat dst = new Mat();
Mat gaosi = new Mat();
// NOTE(review): converts to BGRA although the variable name suggests gray —
// BGR2GRAY may have been intended; confirm.
Cv2.CvtColor(src, srcgary, ColorConversionCodes.BGR2BGRA);
// Cv2.Split(hsv, out var planes);
// Smooth before clustering so k-means yields larger uniform regions.
Cv2.GaussianBlur(srcgary, gaosi, new OpenCvSharp.Size(7, 7), 0);
Mat xx = KMeansHelper.GetMat(gaosi, 100);
Cv2.GaussianBlur(xx, xx, new OpenCvSharp.Size(3, 3), 0);
// Cv2.ImShow("gaosi", gaosi);
// Mat x1 = KMeansHelper.GetMat(xx, 100);
//Cv2.GaussianBlur(x1, xx, new OpenCvSharp.Size(3, 3), 0);
Cv2.ImShow("xx", xx);
//Cv2.Scharr(src, dst, MatType.CV_32F, 1, 0);
//Cv2.ConvertScaleAbs(dst, dst);
//Cv2.ImShow("after dst", dst);
Mat cannyedge = new Mat();
//Cv2.CvtColor(xx, xx, ColorConversionCodes.BGR2GRAY);
Cv2.Canny(xx, cannyedge,10, 80, 3);
// Mat kernel = new Mat(3, 3, MatType.CV_8UC1);
// Cv2.MorphologyEx(cannyedge, cannyedge, MorphTypes.Close, kernel,null,3);
Cv2.ImShow("cannyedge", cannyedge);
// MyFindEdgeContours(cannyedge, 30, 1);
// mask aliases the edge image; the morphology below mutates cannyedge too.
mask = cannyedge;
//Cv2.GaussianBlur(cannyedge,mask, new OpenCvSharp.Size(1, 1), 0);
//Cv2.ImShow("mask", mask);
Cv2.MorphologyEx(mask, mask, MorphTypes.Open, kernel, null, 3);
//Cv2.MorphologyEx(mask, mask, MorphTypes.Dilate, kernel,null,3);
//Cv2.MorphologyEx(mask, mask, MorphTypes.Erode, kernel, null, 3);
//Cv2.MorphologyEx(mask, mask, MorphTypes.Dilate, kernel, null, 3);
Cv2.MorphologyEx(mask, mask, MorphTypes.Close, kernel, null, 3);
Cv2.ImShow("mask", mask);
//var tasks = new List<Task<int>>();
//tasks.Add(Task<int>.Factory.StartNew(() => { return FixLayOutRow_Mins(filtercanny); }));
//tasks.Add(Task<int>.Factory.StartNew(() => { return FixLayOutRow_Plus(filtercanny); }));
//tasks.Add(Task<int>.Factory.StartNew(() => { return FixLayOutCol_Plus(filtercanny); }));
//tasks.Add(Task<int>.Factory.StartNew(() => { return FixLayOutCol_Mins(filtercanny); }));
//Task.WaitAll(tasks.ToArray());
// Cv2.ImShow("afer fix cannyedge", filtercanny);
// Re-run Canny on the (morphologically processed) edge image.
Cv2.Canny(cannyedge, cannyedge, 30, 100, 3);
Cv2.ImShow("afer2 fix cannyedge", cannyedge);
//Cv2.ImShow("PreMask", filtercanny);
//HandleImg(filtercanny);
Cv2.ImShow("Mask", mask);
//HandleMask(filtercanny);
// Cv2.ImShow("MaskLast", filtercanny);
//Mat res = HandleSrcToMask(src, filtercanny);
//Cv2.ImShow("res", res);
// byte[] kernelValues = { 0, 1, 0, 1, 1, 1, 0, 1, 0 }; // cross (+)
// //byte[] kernelValues = { 0, 0, 0, 0, 0, 0, 0, 0, 0 }; // cross (+)
// Mat kernel = new Mat(3, 3, MatType.CV_8UC1, kernelValues);
// Cv2.Dilate(res, res, kernel);
// Cv2.ImShow("res", res);
}
/// <summary>
/// If the pixel at (row, col) is black (0) and sits inside a short vertical
/// run (GetRowSpace below 5 within a 10-pixel window), promote it to white
/// (255) so thin gaps in the edge map are filled. Empty else branch removed;
/// rewritten with a guard clause.
/// </summary>
public void FixLayOutPoint(Mat img, int row, int col)
{
    if (img.At<byte>(row, col) != 0)
        return;
    if (GetRowSpace(img, row, col, 10) < 5)
    {
        img.At<byte>(row, col) = 255;
    }
}
/// <summary>
/// Walks vertically away from <paramref name="nowrow"/> at a fixed column,
/// visiting every second row (up, then down). Black pixels inside short
/// vertical runs (GetRowSpace below 5 within a 10-pixel window) are whitened;
/// each pass stops at the first non-black pixel.
/// </summary>
public void FixRowLevelPoint(Mat img, int nowrow, int col)
{
    // Upward pass: step two rows at a time toward the top edge.
    int r = nowrow;
    while (r > 0)
    {
        if (img.At<byte>(r, col) != 0)
            break;
        if (GetRowSpace(img, r, col, 10) < 5)
            img.At<byte>(r, col) = 255;
        r -= 2;
    }
    // Downward pass: step two rows at a time toward the bottom edge.
    r = nowrow;
    while (r < img.Rows)
    {
        if (img.At<byte>(r, col) != 0)
            break;
        if (GetRowSpace(img, r, col, 10) < 5)
            img.At<byte>(r, col) = 255;
        r += 2;
    }
}
/// <summary>
/// Horizontal counterpart of <see cref="FixRowLevelPoint"/>: walks left and
/// then right from <paramref name="nowcol"/> along a fixed row, visiting every
/// second column, whitening black pixels that belong to short horizontal runs.
/// Each pass stops at the first non-black pixel.
/// </summary>
public void FixColLevelPoint(Mat img, int row, int nowcol)
{
    // Leftward pass.
    for (int col = nowcol; col > 0; col -= 2)
    {
        if (img.At<byte>(row, col) != 0)
            break;
        // BUGFIX: was GetRowSpace (vertical run) — a horizontal scan must
        // measure the horizontal run, as the column loops in FixLayOut do.
        if (GetColSpace(img, row, col, 10) < 5)
        {
            img.At<byte>(row, col) = 255;
        }
    }
    // Rightward pass.
    for (int col = nowcol; col < img.Cols; col += 2)
    {
        if (img.At<byte>(row, col) != 0)
            break;
        if (GetColSpace(img, row, col, 10) < 5)
        {
            img.At<byte>(row, col) = 255;
        }
    }
}
/// <summary>
/// Edge-map cleanup: for every row, walks inward from the left and the right
/// edge while pixels are black (0); a black pixel in a short vertical run
/// (GetRowSpace below 5 within a 10-pixel window) is whitened, otherwise the
/// vertical neighbourhood is repaired via FixRowLevelPoint. A second pass does
/// the same per column using GetColSpace/FixColLevelPoint. Each scan stops at
/// the first non-black pixel.
/// </summary>
public void FixLayOut(Mat img)
{
// Row pass: scan inward from left, then from right.
for (int row = 0; row < img.Rows; row++)
{
for (int col = 0; col < img.Cols; col++)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
if (GetRowSpace(img, row, col, 10) < 5)
{
img.At<byte>(row, col) = 255;
}
else
{
FixRowLevelPoint(img, row, col);
}
}
else
{
break;
}
}
for (int col = img.Cols - 1; col > 0; col--)
{
int m = img.At<byte>(row, col);
if (m == 0 )
{
if (GetRowSpace(img, row, col, 10)< 5)
{
img.At<byte>(row, col) = 255;
}
else
{
FixRowLevelPoint(img, row, col);
}
}
else
{
break;
}
}
}
// Column pass: scan inward from top, then from bottom.
for (int col = 0; col < img.Cols; col++)
{
for (int row = 0; row < img.Rows; row++)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
if (GetColSpace(img, row, col, 10) < 5)
{
img.At<byte>(row, col) = 255;
}
else
{
FixColLevelPoint(img, row, col);
}
}
else
{
break;
}
}
for (int row = img.Rows - 1; row > 0; row--)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
if (GetColSpace(img, row, col, 10) < 5)
{
img.At<byte>(row, col) = 255;
}
else
{
FixColLevelPoint(img, row, col);
}
}
else
{
break;
}
}
}
}
/// <summary>
/// Row-only variant of FixLayOut: scans each row inward from the left and the
/// right edge while pixels are black, whitening short vertical runs and
/// otherwise repairing via FixRowLevelPoint. Always returns 0 (the int return
/// lets it run under Task&lt;int&gt; — see the commented task code in P2).
/// </summary>
public int FixLayOutRow(Mat img)
{
for (int row = 0; row < img.Rows; row++)
{
// Left-to-right scan, stops at the first non-black pixel.
for (int col = 0; col < img.Cols; col++)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
if (GetRowSpace(img, row, col, 10) < 5)
{
img.At<byte>(row, col) = 255;
}
else
{
FixRowLevelPoint(img, row, col);
}
}
else
{
break;
}
}
// Right-to-left scan, stops at the first non-black pixel.
for (int col = img.Cols - 1; col > 0; col--)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
if (GetRowSpace(img, row, col, 10) < 5)
{
img.At<byte>(row, col) = 255;
}
else
{
FixRowLevelPoint(img, row, col);
}
}
else
{
break;
}
}
}
return 0;
}
/// <summary>
/// For every row, walks right from the left edge over black pixels: a pixel
/// in a short vertical run (GetRowSpace over a 20-pixel window below 10) is
/// whitened and the row is finished; otherwise the vertical neighbourhood is
/// repaired via FixRowLevelPoint. Always returns 0 (task-friendly signature).
/// </summary>
public int FixLayOutRow_Plus(Mat img)
{
    for (int row = 0; row < img.Rows; row++)
    {
        for (int col = 0; col < img.Cols; col++)
        {
            if (img.At<byte>(row, col) != 0)
                break; // reached real content; next row
            if (GetRowSpace(img, row, col, 20) < 10)
            {
                img.At<byte>(row, col) = 255;
                break; // row handled
            }
            FixRowLevelPoint(img, row, col);
        }
    }
    return 0;
}
/// <summary>
/// Right-to-left counterpart of FixLayOutRow_Plus: for every row, walks left
/// from the right edge over black pixels; a pixel in a short vertical run
/// (GetRowSpace over a 22-pixel window below 10) is whitened and the row is
/// finished, otherwise FixRowLevelPoint repairs the neighbourhood. Returns 0.
/// </summary>
public int FixLayOutRow_Mins(Mat img)
{
    for (int row = 0; row < img.Rows; row++)
    {
        for (int col = img.Cols - 1; col > 0; col--)
        {
            if (img.At<byte>(row, col) != 0)
                break; // reached real content; next row
            if (GetRowSpace(img, row, col, 22) < 10)
            {
                img.At<byte>(row, col) = 255;
                break; // row handled
            }
            FixRowLevelPoint(img, row, col);
        }
    }
    return 0;
}
/// <summary>
/// Column-only variant of FixLayOut: scans each column inward from the top
/// and the bottom edge while pixels are black, whitening short horizontal
/// runs and otherwise repairing via FixColLevelPoint. Always returns 0.
/// </summary>
public int FixLayOutCol(Mat img)
{
for (int col = 0; col < img.Cols; col++)
{
// Top-to-bottom scan, stops at the first non-black pixel.
for (int row = 0; row < img.Rows; row++)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
if (GetColSpace(img, row, col, 10) < 5)
{
img.At<byte>(row, col) = 255;
}
else
{
FixColLevelPoint(img, row, col);
}
}
else
{
break;
}
}
// Bottom-to-top scan, stops at the first non-black pixel.
for (int row = img.Rows - 1; row > 0; row--)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
if (GetColSpace(img, row, col, 10) < 5)
{
img.At<byte>(row, col) = 255;
}
else
{
FixColLevelPoint(img, row, col);
}
}
else
{
break;
}
}
}
return 0;
}
/// <summary>
/// For every column, walks down from the top edge over black pixels: a pixel
/// in a short horizontal run (GetColSpace over a 20-pixel window below 10) is
/// whitened and the column is finished; otherwise FixColLevelPoint repairs
/// the horizontal neighbourhood. Always returns 0 (task-friendly signature).
/// </summary>
public int FixLayOutCol_Plus(Mat img)
{
    for (int col = 0; col < img.Cols; col++)
    {
        for (int row = 0; row < img.Rows; row++)
        {
            if (img.At<byte>(row, col) != 0)
                break; // reached real content; next column
            if (GetColSpace(img, row, col, 20) < 10)
            {
                img.At<byte>(row, col) = 255;
                break; // column handled
            }
            FixColLevelPoint(img, row, col);
        }
    }
    return 0;
}
/// <summary>
/// Bottom-up counterpart of FixLayOutCol_Plus: for every column, walks up from
/// the bottom edge over black pixels; a pixel in a short horizontal run
/// (GetColSpace over a 20-pixel window below 10) is whitened and the column is
/// finished, otherwise FixColLevelPoint repairs the neighbourhood. Returns 0.
/// </summary>
public int FixLayOutCol_Mins(Mat img)
{
    for (int col = 0; col < img.Cols; col++)
    {
        for (int row = img.Rows - 1; row > 0; row--)
        {
            if (img.At<byte>(row, col) != 0)
                break; // reached real content; next column
            if (GetColSpace(img, row, col, 20) < 10)
            {
                img.At<byte>(row, col) = 255;
                break; // column handled
            }
            FixColLevelPoint(img, row, col);
        }
    }
    return 0;
}
/// <summary>
/// Marks the border-reachable "outside" region of a binary edge map with the
/// shadow value 128: every row is scanned inward from the left and the right,
/// every column from the top and the bottom; black pixels (0) become 128 and
/// their neighbourhood is flooded via SetMaskShodowAreaVertical/Horizontal.
/// Each scan stops at the first pixel that is neither 0 nor 128 (an edge).
/// HandleMask later turns 128 into background and the rest into foreground.
/// </summary>
public void HandleImg(Mat img)
{
// Row pass: flood from the left and right image borders.
for(int row = 0; row < img.Rows; row++)
{
for(int col = 0; col < img.Cols; col++)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
// if(GetColSpace(img, row, col, 10) > 5)
{
img.At<byte>(row, col) = 128;
SetMaskShodowAreaVertical(img, row, col);
}
}
else if (m == 128)
{
// if (GetColSpace(img, row, col) >5)
{
SetMaskShodowAreaVertical(img, row, col);
}
}
else
{
break;
}
}
for (int col = img.Cols - 1; col > 0; col--)
{
int m = img.At<byte>(row, col);
if (m == 0 )
{
// if (GetColSpace(img, row, col, 10) > 5)
{
img.At<byte>(row, col) = 128;
SetMaskShodowAreaVertical(img, row, col);
}
}
else if (m == 128)
{
SetMaskShodowAreaVertical(img, row, col);
}
else
{
break;
}
}
}
// Column pass: flood from the top and bottom image borders.
for (int col = 0; col < img.Cols; col++)
{
for (int row = 0; row < img.Rows; row++)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
//if (GetRowSpace(img, row, col,10) > 5)
{
img.At<byte>(row, col) = 128;
SetMaskShodowAreaHorizontal(img, row, col);
}
}
else if (m == 128)
{
// if (GetRowSpace(img, row, col, 10) > 5)
{
SetMaskShodowAreaHorizontal(img, row, col);
}
}
else
{
break;
}
}
for (int row = img.Rows - 1; row > 0; row--)
{
int m = img.At<byte>(row, col);
if (m == 0)
{
// if (GetRowSpace(img, row, col, 10) > 5)
{
img.At<byte>(row, col) = 128;
SetMaskShodowAreaHorizontal(img, row, col);
}
}
else if (m == 128)
{
// if (GetRowSpace(img, row, col, 10) > 5)
{
SetMaskShodowAreaHorizontal(img, row, col);
}
// if (GetHSpace(img, row, col) > 10)
//{
//}
}
else
{
break;
}
}
}
}
/// <summary>
/// Finalises the mask produced by HandleImg: the 128 shadow marker becomes
/// background (0) and every other value becomes foreground (255).
/// </summary>
public void HandleMask(Mat img)
{
    for (int r = 0; r < img.Rows; r++)
    {
        for (int c = 0; c < img.Cols; c++)
        {
            bool isShadow = img.At<byte>(r, c) == 128;
            img.At<byte>(r, c) = isShadow ? (byte)0 : (byte)255;
        }
    }
}
/// <summary>
/// Composites <paramref name="src"/> over a random background colour using
/// <paramref name="mask"/>: 255 keeps the source pixel (foreground), 0 paints
/// the background colour, and intermediate values alpha-blend the two.
/// </summary>
/// <returns>A new Mat with the composited result; inputs are not modified.</returns>
public Mat HandleSrcToMask(Mat src, Mat mask)
{
    // Fixed-seed RNG so the background colour is reproducible; the channel
    // draw order (indices 0, 2, 1) matches the original implementation.
    RNG rng = new RNG(12345);
    Vec3b background = new Vec3b();
    background[0] = (byte)rng.Uniform(0, 255);
    background[2] = (byte)rng.Uniform(0, 255);
    background[1] = (byte)rng.Uniform(0, 255);
    Mat output = new Mat(src.Size(), src.Type());
    for (int row = 0; row < src.Rows; row++)
    {
        for (int col = 0; col < src.Cols; col++)
        {
            int maskValue = mask.At<byte>(row, col);
            if (maskValue == 255)
            {
                // Foreground: copy the source pixel unchanged.
                output.At<Vec3b>(row, col) = src.At<Vec3b>(row, col);
            }
            else if (maskValue == 0)
            {
                // Background: flat fill with the random colour.
                output.At<Vec3b>(row, col) = background;
            }
            else
            {
                // Edge: blend source over background by the mask intensity.
                double alpha = maskValue / 255.0;
                Vec3b s = src.At<Vec3b>(row, col);
                output.At<Vec3b>(row, col)[0] = (byte)(int)(s[0] * alpha + background[0] * (1.0 - alpha));
                output.At<Vec3b>(row, col)[1] = (byte)(int)(s[1] * alpha + background[1] * (1.0 - alpha));
                output.At<Vec3b>(row, col)[2] = (byte)(int)(s[2] * alpha + background[2] * (1.0 - alpha));
            }
        }
    }
    return output;
}
/// <summary>
/// Spreads the 128 shadow marker vertically from (nowrow, col), visiting every
/// second row upward then downward; stops at the first pixel that is neither
/// black (0) nor already shadowed (128).
/// </summary>
public void SetMaskShodowAreaVertical(Mat img, int nowrow, int col)
{
    // Upward spread.
    int row = nowrow;
    while (row > 0)
    {
        int v = img.At<byte>(row, col);
        if (v != 0 && v != 128)
            break;
        if (v == 0)
            img.At<byte>(row, col) = 128;
        row -= 2;
    }
    // Downward spread.
    row = nowrow;
    while (row < img.Rows)
    {
        int v = img.At<byte>(row, col);
        if (v != 0 && v != 128)
            break;
        if (v == 0)
            img.At<byte>(row, col) = 128;
        row += 2;
    }
}
/// <summary>
/// Spreads the 128 shadow marker horizontally from (row, nowcol), visiting
/// every second column leftward then rightward; stops at the first pixel that
/// is neither black (0) nor already shadowed (128).
/// </summary>
public void SetMaskShodowAreaHorizontal(Mat img, int row, int nowcol)
{
    // Leftward spread.
    int col = nowcol;
    while (col > 0)
    {
        int v = img.At<byte>(row, col);
        if (v != 0 && v != 128)
            break;
        if (v == 0)
            img.At<byte>(row, col) = 128;
        col -= 2;
    }
    // Rightward spread.
    col = nowcol;
    while (col < img.Cols)
    {
        int v = img.At<byte>(row, col);
        if (v != 0 && v != 128)
            break;
        if (v == 0)
            img.At<byte>(row, col) = 128;
        col += 2;
    }
}
/// <summary>
/// Measures the vertical run of "empty" pixels (value 0 or the 128 shadow
/// marker) through (nowrow, col): scans upward, then downward. Each pass stops
/// at the first other value, or once the count exceeds
/// <paramref name="maxlength"/>. Note the starting pixel is counted by both
/// passes, matching the original accounting.
/// </summary>
public int GetRowSpace(Mat img, int nowrow, int col, int maxlength = 200)
{
    int run = 0;
    // Upward half of the run.
    for (int row = nowrow; row > 0 && run <= maxlength; row--)
    {
        int v = img.At<byte>(row, col);
        if (v != 0 && v != 128)
            break;
        run++;
    }
    // Downward half of the run.
    for (int row = nowrow; row < img.Rows && run <= maxlength; row++)
    {
        int v = img.At<byte>(row, col);
        if (v != 0 && v != 128)
            break;
        run++;
    }
    return run;
}
/// <summary>
/// Measures the horizontal run of "empty" pixels (value 0 or the 128 shadow
/// marker) through (row, nowcol): scans leftward, then rightward. Each pass
/// stops at the first other value, or once the count exceeds
/// <paramref name="maxlength"/>. The starting pixel is counted by both passes,
/// matching the original accounting.
/// </summary>
public int GetColSpace(Mat img, int row, int nowcol, int maxlength = 200)
{
    int run = 0;
    // Leftward half of the run.
    for (int col = nowcol; col > 0 && run <= maxlength; col--)
    {
        int v = img.At<byte>(row, col);
        if (v != 0 && v != 128)
            break;
        run++;
    }
    // Rightward half of the run.
    for (int col = nowcol; col < img.Cols && run <= maxlength; col++)
    {
        int v = img.At<byte>(row, col);
        if (v != 0 && v != 128)
            break;
        run++;
    }
    return run;
}
/// <summary>
/// Compares dilation with the default 3x3 rectangular kernel against a
/// cross-shaped kernel on a binarised image.
/// BUGFIX: the image was loaded in colour (despite being named "gray") and
/// thresholded with ThresholdTypes.Mask, which is not a thresholding
/// operation; it is now loaded as grayscale and binarised with Otsu.
/// </summary>
private void MorphologyTest()
{
    var gray = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a.jpg", ImreadModes.Grayscale);
    var binary = new Mat();
    var dilate1 = new Mat();
    var dilate2 = new Mat();
    byte[] kernelValues = { 0, 1, 0, 1, 1, 1, 0, 1, 0 }; // cross (+)
    var kernel = new Mat(3, 3, MatType.CV_8UC1, kernelValues);
    // Binarize with an automatically chosen (Otsu) threshold.
    Cv2.Threshold(gray, binary, 0, 255, ThresholdTypes.Otsu);
    // null kernel -> OpenCV's default 3x3 rectangular structuring element
    Cv2.Dilate(binary, dilate1, null);
    // + kernel
    Cv2.Dilate(binary, dilate2, kernel);
    Cv2.ImShow("binary", binary);
    Cv2.ImShow("dilate (kernel = null)", dilate1);
    Cv2.ImShow("dilate (kernel = +)", dilate2);
}
/// <summary>
/// Demonstrates Cv2.MixChannels: reorders a synthetic RGBA image's channels
/// into a BGR image plus a separate alpha plane. The unused ImRead of a
/// developer-local file was removed (dead I/O).
/// </summary>
public void MixChannelTest()
{
    var rgba = new Mat(300, 300, MatType.CV_8UC4, new Scalar(50, 100, 150, 200));
    var bgr = new Mat(rgba.Rows, rgba.Cols, MatType.CV_8UC3);
    var alpha = new Mat(rgba.Rows, rgba.Cols, MatType.CV_8UC1);
    Mat[] input = { rgba };
    Mat[] output = { bgr, alpha };
    // rgba[0] -> bgr[2], rgba[1] -> bgr[1],
    // rgba[2] -> bgr[0], rgba[3] -> alpha[0]
    int[] fromTo = { 0, 2, 1, 1, 2, 0, 3, 3 };
    Cv2.MixChannels(input, output, fromTo);
    Cv2.ImShow("rgba", rgba);
    Cv2.ImShow("bgr", bgr);
    Cv2.ImShow("alpha", alpha);
}
/// <summary>
/// Demonstrates channel split/merge: converts to RGBA, shows the first three
/// planes, inverts plane 1, then re-merges and displays the result.
/// </summary>
public void SplitMergeTest()
{
    Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a.jpg", ImreadModes.Color);
    // BUGFIX: Mat.CvtColor returns a NEW Mat; the original call discarded the
    // result, making the conversion a silent no-op.
    src = src.CvtColor(ColorConversionCodes.BGR2RGBA);
    Cv2.Split(src, out var planes);
    Cv2.ImShow("planes 0", planes[0]);
    Cv2.ImShow("planes 1", planes[1]);
    Cv2.ImShow("planes 2", planes[2]);
    Cv2.BitwiseNot(planes[1], planes[1]);
    // Merge the (modified) planes back into one image.
    var merged = new Mat();
    Cv2.Merge(planes, merged);
    Cv2.ImShow("src", src);
    Cv2.ImShow("merged", merged);
}
/// <summary>
/// Edge-detection scratch pad: compares Canny, Sobel and Laplacian edges on a
/// blurred copy of the image, extracts contours, and builds an HSV range mask.
/// Dead commented code removed; the discarded CvtColor return value fixed.
/// </summary>
private void P1()
{
    Mat result = new Mat();
    Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a.jpg", ImreadModes.Color);
    Mat dst = new Mat();
    Mat gaosi = new Mat();
    Cv2.GaussianBlur(src, gaosi, new OpenCvSharp.Size(5, 5), 0);
    Cv2.ImShow("gapsi", gaosi);
    Mat xx = KMeansHelper.GetMat(gaosi, 3);
    Cv2.ImShow("xx", xx);
    Mat cannyedge = new Mat();
    // Canny requires a single-channel input.
    Cv2.CvtColor(gaosi, gaosi, ColorConversionCodes.BGR2GRAY);
    Cv2.Canny(gaosi, cannyedge, 80, 80, 3);
    Cv2.ImShow("edge", cannyedge);
    result = CheckContours(cannyedge);
    Cv2.ImShow("result", result);
    result = MyFindEdgeContours(cannyedge);
    Cv2.MorphologyEx(src, dst, MorphTypes.Close, null);
    Cv2.ImShow("result", result);
    Cv2.ImShow("dst", dst);
    Mat Canny_Edge2 = new Mat();
    Cv2.Canny(cannyedge, Canny_Edge2, 20, 80, 3);
    Cv2.ImShow("Canny_Edge2", Canny_Edge2);
    Mat Sobel_Edge = new Mat();
    Cv2.Sobel(gaosi, Sobel_Edge, MatType.CV_16S, 1, 0, 3, 1);
    Cv2.ImShow(" Sobel_Edge", Sobel_Edge);
    Mat Edge_Laplacian = new Mat();
    Cv2.Laplacian(gaosi, Edge_Laplacian, MatType.CV_16S, 3, 1, 0);
    Cv2.ImShow("Edge_Laplacian", Edge_Laplacian);
    Mat x1 = MyFindContours(gaosi);
    Cv2.ImShow("x1", x1);
    // NOTE(review): ConvexHull expects a point set; passing an edge image here
    // is questionable — confirm the intended input.
    Cv2.ConvexHull(cannyedge, dst);
    // BUGFIX: Mat.CvtColor returns a NEW Mat; the original call discarded the
    // result, so InRange below ran on BGR data with HSV-style bounds.
    src = src.CvtColor(ColorConversionCodes.BGR2HSV);
    Cv2.InRange(src, new Scalar(0, 0, 100), new Scalar(180, 250, 255), dst);
}
/// <summary>
/// K-means background replacement (k = 32): clusters pixel colours, treats
/// the cluster containing a near-corner sample as background, builds a mask,
/// softens its edge, then composites the source over a random colour.
/// Near-duplicate of the parameterless Run(); consider consolidating.
/// </summary>
private void Run(Mat src)
{
Mat points = mat_to_samples(src);
int numCluster = 32;
Mat labels = new Mat();
var centers = new Mat();
TermCriteria criteria = new TermCriteria(CriteriaTypes.Eps, 10, 0.1); // TermCriteria(TermCriteria::EPS + TermCriteria::COUNT, 10, 0.1);
// kmeans(data, numCluster, labels, criteria, 3, KMEANS_PP_CENTERS);
Cv2.Kmeans(points, numCluster, labels, criteria, 6, KMeansFlags.PpCenters, centers);
Mat mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
// Sample point (2,2) as the background colour.
// NOTE(review): the linear index used below is row * width + col, which for
// (2,2) would be src.Cols * 2 + 2; Rows * 2 + 2 only matches for square
// images — confirm intent.
int index = src.Rows * 2 + 2;
int cindex = labels.At<int>(index, 0);
int height = src.Rows;
int width = src.Cols;
Mat dst = new Mat();
src.CopyTo(dst);
// Build the mask: background cluster -> 0, everything else -> 255.
for (int row = 0; row < height; row++)
{
for (int col = 0; col < width; col++)
{
index = row * width + col;
int label = labels.At<int>(index, 0);
if (label == cindex)
{
dst.At<Vec3b>(row, col)[0] = 0; // background
dst.At<Vec3b>(row, col)[1] = 0; // background
dst.At<Vec3b>(row, col)[2] = 0; // background
mask.At<byte>(row, col) = 0;
}
else
mask.At<byte>(row, col) = 255; // foreground
}
}
Cv2.ImShow("mask", mask);
Cv2.ImShow("KMeans", dst);
// Soften the mask edge before compositing.
Mat kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3), new Point(-1, -1));
Cv2.Erode(mask, mask, kernel);
Cv2.GaussianBlur(mask, mask, new Size(3, 3), 0, 0);
// Cv2.ImShow("Guassianblur Mask", mask);
// Fixed-seed random background colour (drawn in channel order 0, 2, 1).
RNG rng = new RNG(12345);
Vec3b color = new Vec3b();
color[0] = (byte)rng.Uniform(0, 255);
color[2] = (byte)rng.Uniform(0, 255);
color[1] = (byte)rng.Uniform(0, 255);
Mat result = new Mat(src.Size(), src.Type());
double w = 0.0; // blend weight
int b = 0, g = 0, r = 0;
int b1 = 0, g1 = 0, r1 = 0;
int b2 = 0, g2 = 0, r2 = 0;
for (int row = 0; row < src.Rows; row++)
{
for (int col = 0; col < src.Cols; col++)
{
int m = mask.At<byte>(row, col);
if (m == 255)
{
result.At<Vec3b>(row, col) = src.At<Vec3b>(row, col); // foreground
}
else if (m == 0)
{
result.At<Vec3b>(row, col) = color; // background
}
else
{/* edge blending by mask intensity */
w = m / 255.0;
b1 = src.At<Vec3b>(row, col)[0];
g1 = src.At<Vec3b>(row, col)[1];
r1 = src.At<Vec3b>(row, col)[2];
b2 = color[0];
g2 = color[1];
r2 = color[2];
b = (int)(b1 * w + b2 * (1.0 - w));
g = (int)(g1 * w + g2 * (1.0 - w));
r = (int)(r1 * w + r2 * (1.0 - w));
result.At<Vec3b>(row, col)[0] = (byte)b;
result.At<Vec3b>(row, col)[1] = (byte)g;
result.At<Vec3b>(row, col)[2] = (byte)r;
}
}
}
// Cv2.ImShow("SRC ", src);
Cv2.ImShow("RESULT ", result);
}
/// <summary>
/// Small driver: loads the k-means-quantised image from a hard-coded developer
/// path, records its colour histogram, and displays its contours.
/// </summary>
public void ProcessFlow()
{
    Mat quantised = KMeansHelper.GetMat("C:\\Users\\E053707\\Pictures\\bing\\aa.jpg");
    Cv2.ImShow("Source", quantised);
    var colourHistogram = GetImageColor(quantised);
    Mat contours = MyFindContours(quantised);
    Cv2.ImShow("S1", contours);
}
/// <summary>
/// Colour-quantisation sweep: reshapes the image into an N x 1 matrix of
/// 3-channel samples and runs k-means for k = 204, 224, 244, 264, displaying
/// each clustered image with pixels replaced by their cluster centre colour.
/// </summary>
public void KMeans3()
{
Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a.jpg", ImreadModes.Color);
Cv2.ImShow("Source", src);
Cv2.WaitKey(1); // do events
//Cv2.Blur(src, src, new Size(15, 15));
//Cv2.ImShow("Blurred Image", src);
//Cv2.WaitKey(1); // do events
// Converts the MxNx3 image into a Kx3 matrix where K=MxN and
// each row is now a vector in the 3-D space of RGB.
// change to a Mx3 column vector (M is number of pixels in image)
var columnVector = src.Reshape(cn: 3, rows: src.Rows * src.Cols);
// convert to floating point, it is a requirement of the k-means method of OpenCV.
var samples = new Mat();
columnVector.ConvertTo(samples, MatType.CV_32FC3);
for (var clustersCount = 204; clustersCount <= 280; clustersCount += 20)
{
var bestLabels = new Mat();
var centers = new Mat();
Cv2.Kmeans(
data: samples,
k: clustersCount,
bestLabels: bestLabels,
criteria:
new TermCriteria(type: CriteriaTypes.Eps|CriteriaTypes.MaxIter, maxCount: 10, epsilon: 1.0),
attempts: 3,
flags:KMeansFlags.PpCenters,
centers: centers);
var clusteredImage = new Mat(src.Rows, src.Cols, src.Type());
for (var size = 0; size < src.Cols * src.Rows; size++)
{
// NOTE(review): bestLabels is N x 1; At<int>(0, size) relies on contiguous
// storage rather than logical (row, col) addressing — At<int>(size, 0)
// would be the conventional form. Confirm.
var clusterIndex = bestLabels.At<int>(0, size);
var newPixel = new Vec3b
{
Item0 = (byte)(centers.At<float>(clusterIndex, 0)), // B
Item1 = (byte)(centers.At<float>(clusterIndex, 1)), // G
Item2 = (byte)(centers.At<float>(clusterIndex, 2)) // R
};
clusteredImage.Set(size / src.Cols, size % src.Cols, newPixel);
}
Cv2.ImShow(string.Format("Clustered Image [k:{0}]", clustersCount), clusteredImage);
}
}
/// <summary>
/// K-means scratch test: converts the sample matrix to float, clusters with
/// k = 4, and displays the label map reshaped back to image height.
/// NOTE(review): dst holds raw label indices (0-3) converted to CV_32FC1, so
/// the "dst" window will appear essentially black — scale before showing if a
/// visible result is expected.
/// </summary>
public void RunProcessTest()
{
Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a.jpg", ImreadModes.Color);
Mat data = mat_to_samples(src);//src.Reshape(0,src.Height*src.Width).Clone();// src.Reshape(src.Width * src.Height, src.Width * src.Height); // mat_to_samples(src);
data.ConvertTo(data, MatType.CV_32F);
TermCriteria criteria = new TermCriteria(CriteriaTypes.Eps, 10, 1); // TermCriteria(TermCriteria::EPS + TermCriteria::COUNT, 10, 0.1);
Mat labels = new Mat();
var centers = new Mat();
Cv2.Kmeans(data,4, labels, criteria, 10, KMeansFlags.RandomCenters, centers);
int height = centers.Height;
int width = centers.Width;
// Reshape the N x 1 label column back to one row per image row.
Mat dst = labels.Reshape(0, src.Height).Clone();
dst.ConvertTo(dst, MatType.CV_32FC1);
Cv2.ImShow("dst", dst );
Cv2.ImShow("data", data);
Cv2.ImShow("SRC", src);
}
/// <summary>
/// Builds a per-pixel colour histogram keyed by "#XXXXXX" hex codes in channel
/// order Item0/Item1/Item2 (BGR unless the caller converted beforehand).
/// Cleanup: removed the dead, never-enumerated `dicSort` LINQ query (deferred
/// execution meant it never even ran) and replaced ContainsKey + indexer with
/// a single TryGetValue lookup.
/// </summary>
/// <param name="src">3-channel image to histogram.</param>
/// <returns>Map of hex colour code to occurrence count (unsorted).</returns>
public Dictionary<string, int> GetImageColor(Mat src)
{
    var mat3 = new Mat<Vec3b>(src);
    var indexer = mat3.GetIndexer();
    var sb = new StringBuilder();
    var histogram = new Dictionary<string, int>();
    for (int i = 0; i < src.Rows; i++)
    {
        for (int j = 0; j < src.Cols; j++)
        {
            sb.Clear();
            Vec3b color = indexer[i, j];
            // "X2" == ToString("X").PadLeft(2, '0') for byte values.
            sb.Append('#').Append(color.Item0.ToString("X2")).Append(color.Item1.ToString("X2")).Append(color.Item2.ToString("X2"));
            string colorcode = sb.ToString();
            if (histogram.TryGetValue(colorcode, out int count))
            {
                histogram[colorcode] = count + 1;
            }
            else
            {
                histogram.Add(colorcode, 1);
            }
        }
    }
    return histogram;
}
/// <summary>
/// Builds a colour histogram sampling one pixel per image row.
/// NOTE(review): indexer[i] addresses element i along the first dimension,
/// i.e. the first column of row i — confirm that sampling only the first
/// column is the intent behind "rows image color".
/// Cleanup: removed the dead, never-enumerated `dicSort` LINQ query and used
/// TryGetValue instead of ContainsKey + indexer.
/// </summary>
/// <returns>Map of "#XXXXXX" hex code to occurrence count (unsorted).</returns>
public Dictionary<string, int> GetRowsImageColor(Mat src)
{
    var mat3 = new Mat<Vec3b>(src);
    var indexer = mat3.GetIndexer();
    var sb = new StringBuilder();
    var histogram = new Dictionary<string, int>();
    for (int i = 0; i < src.Rows; i++)
    {
        sb.Clear();
        Vec3b color = indexer[i];
        // Hex code in channel order Item0/Item1/Item2 (BGR unless converted).
        sb.Append('#').Append(color.Item0.ToString("X2")).Append(color.Item1.ToString("X2")).Append(color.Item2.ToString("X2"));
        string colorcode = sb.ToString();
        if (histogram.TryGetValue(colorcode, out int count))
        {
            histogram[colorcode] = count + 1;
        }
        else
        {
            histogram.Add(colorcode, 1);
        }
    }
    return histogram;
}
/// <summary>
/// Loads a fixed developer image, binarises a grayscale copy with Otsu, and
/// builds a colour histogram of the RGBA-converted source.
/// BUGFIX: the declaration/initialisation of `binary` had been commented out
/// while the Threshold call still used it, so this method did not compile;
/// the grayscale conversion is restored. Also fixed the discarded
/// Mat.CvtColor return value (it returns a new Mat) and removed the dead
/// `dicSort`/`v1` locals.
/// </summary>
public void GetImageColor()
{
    Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a.jpg", ImreadModes.Color);
    Mat binary = new Mat();
    Cv2.CvtColor(src, binary, ColorConversionCodes.BGR2GRAY);
    Cv2.Threshold(binary, binary, 0, 255, ThresholdTypes.Otsu); // adaptive binarisation
    // CvtColor returns a new Mat — assign it back (was previously discarded).
    src = src.CvtColor(ColorConversionCodes.BGR2RGBA);
    var mat3 = new Mat<Vec3b>(src);
    var indexer = mat3.GetIndexer();
    var sb = new StringBuilder();
    var histogram = new Dictionary<string, int>();
    for (int i = 0; i < src.Rows; i++)
    {
        for (int j = 0; j < src.Cols; j++)
        {
            sb.Clear();
            Vec3b color = indexer[i, j];
            sb.Append('#').Append(color.Item0.ToString("X2")).Append(color.Item1.ToString("X2")).Append(color.Item2.ToString("X2"));
            string colorcode = sb.ToString();
            if (histogram.TryGetValue(colorcode, out int count))
            {
                histogram[colorcode] = count + 1;
            }
            else
            {
                histogram.Add(colorcode, 1);
            }
        }
    }
    Cv2.ImShow("SRC", src);
}
/// <summary>
/// Builds a colour histogram of the blurred image, then replaces two specific
/// colours (#B9A8AB / #BAA9AC) with (76,177,34) in place via the indexer.
/// </summary>
public void Run1()
{
Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a.jpg", ImreadModes.Color);
Cv2.ImShow("SRC", src);
Cv2.GaussianBlur(src, src, new Size(3, 3), 0, 0);
Cv2.ImShow("GaussianBlur SRC", src);
// Scalar color;
// The indexer writes through to src's pixel data.
var mat3 = new Mat<Vec3b>(src);
var indexer = mat3.GetIndexer();
StringBuilder sb = new StringBuilder();
Dictionary<string, int> Dic = new Dictionary<string, int>();
string colorcode = "";
// Pass 1: count every "#XXXXXX" colour code (channel order Item0/1/2).
for (int i = 0; i < src.Rows; i++)
{
for (int j = 0; j < src.Cols; j++)
{
sb.Clear();
Vec3b color = indexer[i, j];
sb.Append("#").Append(color.Item0.ToString("X").PadLeft(2, '0')).Append(color.Item1.ToString("X").PadLeft(2, '0')).Append(color.Item2.ToString("X").PadLeft(2, '0'));
colorcode = sb.ToString();
if (Dic.ContainsKey(colorcode))
{
Dic[colorcode]++;
}
else
{
Dic.Add(colorcode, 1);
}
}
}
// Pass 2: replace the two target colours in place.
for (int i = 0; i < src.Rows; i++)
{
for (int j = 0; j < src.Cols; j++)
{
sb.Clear();
Vec3b color = indexer[i, j];
sb.Append("#").Append(color.Item0.ToString("X").PadLeft(2, '0')).Append(color.Item1.ToString("X").PadLeft(2, '0')).Append(color.Item2.ToString("X").PadLeft(2, '0'));
colorcode = sb.ToString();
//if (colorcode.Equals("#FFFFFF")|| colorcode.Equals("#FEFEFE"))
//{
// Vec3b newColor = new Vec3b(76, 177, 34);
// indexer[i, j] = newColor;
//}
if (colorcode.Equals("#B9A8AB") || colorcode.Equals("#BAA9AC"))
{
Vec3b newColor = new Vec3b(76, 177, 34);
indexer[i, j] = newColor;
}
}
}
Cv2.ImShow("SRCDest", src);
// NOTE(review): dicSort is a deferred LINQ query that is never enumerated —
// it has no effect.
var dicSort = from objDic in Dic orderby objDic.Value descending select objDic;
Console.WriteLine(Dic.Count);
}
/// <summary>
/// Parameterless near-duplicate of Run(Mat): loads a fixed developer image,
/// k-means clusters its colours (k = 32), masks the cluster of a near-corner
/// sample as background, softens the mask, and composites the source over a
/// random colour. Consider delegating to Run(Mat) to remove the duplication.
/// </summary>
private void Run()
{
Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a.jpg", ImreadModes.Color);
Mat points = mat_to_samples(src);
int numCluster =32;
Mat labels = new Mat();
var centers = new Mat();
TermCriteria criteria = new TermCriteria(CriteriaTypes.Eps, 10, 0.1); // TermCriteria(TermCriteria::EPS + TermCriteria::COUNT, 10, 0.1);
// kmeans(data, numCluster, labels, criteria, 3, KMEANS_PP_CENTERS);
Cv2.Kmeans(points, numCluster, labels, criteria, 6, KMeansFlags.PpCenters, centers);
Mat mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
// Sample point (2,2) as the background colour.
// NOTE(review): the linear index used below is row * width + col, which for
// (2,2) would be src.Cols * 2 + 2 — confirm Rows * 2 + 2 is intended.
int index = src.Rows * 2 + 2;
int cindex = labels.At<int>(index, 0);
int height = src.Rows;
int width = src.Cols;
Mat dst=new Mat();
src.CopyTo(dst);
// Build the mask: background cluster -> 0, everything else -> 255.
for (int row = 0; row < height; row++)
{
for (int col = 0; col < width; col++)
{
index = row * width + col;
int label = labels.At<int>(index, 0);
if (label == cindex)
{
dst.At<Vec3b>(row, col)[0] = 0; // background
dst.At<Vec3b>(row, col)[1] = 0; // background
dst.At<Vec3b>(row, col)[2] = 0; // background
mask.At<byte>(row, col) = 0;
}
else
mask.At<byte>(row, col) = 255; // foreground
}
}
Cv2.ImShow("mask", mask);
Cv2.ImShow("KMeans", dst);
// Soften the mask edge before compositing.
Mat kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3), new Point(-1, -1));
Cv2.Erode(mask, mask, kernel);
Cv2.GaussianBlur(mask, mask, new Size(3, 3), 0, 0);
// Cv2.ImShow("Guassianblur Mask", mask);
// Fixed-seed random background colour (drawn in channel order 0, 2, 1).
RNG rng = new RNG(12345);
Vec3b color = new Vec3b();
color[0] = (byte)rng.Uniform(0, 255);
color[2] = (byte)rng.Uniform(0, 255);
color[1] = (byte)rng.Uniform(0, 255);
Mat result = new Mat(src.Size(), src.Type());
double w = 0.0; // blend weight
int b = 0, g = 0, r = 0;
int b1 = 0, g1 = 0, r1 = 0;
int b2 = 0, g2 = 0, r2 = 0;
for (int row = 0; row < src.Rows; row++)
{
for (int col = 0; col < src.Cols; col++)
{
int m = mask.At<byte>(row, col);
if (m == 255)
{
result.At<Vec3b>(row, col) = src.At<Vec3b>(row, col); // foreground
}
else if (m == 0)
{
result.At<Vec3b>(row, col) = color; // background
}
else
{/* edge blending by mask intensity */
w = m / 255.0;
b1 = src.At<Vec3b>(row, col)[0];
g1 = src.At<Vec3b>(row, col)[1];
r1 = src.At<Vec3b>(row, col)[2];
b2 = color[0];
g2 = color[1];
r2 = color[2];
b = (int)(b1 * w + b2 * (1.0 - w));
g = (int)(g1 * w + g2 * (1.0 - w));
r = (int)(r1 * w + r2 * (1.0 - w));
result.At<Vec3b>(row, col)[0] = (byte)b;
result.At<Vec3b>(row, col)[1] = (byte)g;
result.At<Vec3b>(row, col)[2] = (byte)r;
}
}
}
// Cv2.ImShow("SRC ", src);
Cv2.ImShow("RESULT ", result);
}
/// <summary>
/// Same k-means background-replacement flow as Run(), but k = 4 and the
/// sample matrix is built row-by-row with PushBack instead of mat_to_samples.
/// </summary>
private void Process()
{
Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\aa.jpg", ImreadModes.Color);
Mat data = new Mat();
for (int i = 0; i < src.Rows; i++)
{// flatten pixels: one 1x3 float row per pixel
for (int j = 0; j < src.Cols; j++)
{
Vec3b point = src.At<Vec3b>(i, j);
// Mat tmp = (Mat_<float>(1, 3) << point[0], point[1], point[2]);
// NOTE(review): the Scalar constructor fills a single-channel Mat with its
// FIRST component only, so all three elements become point[0] — confirm;
// element-wise Set may be needed to reproduce the C++ Mat_ initialiser.
Mat tmp = new Mat(1, 3, MatType.CV_32F, new Scalar(point[0], point[1], point[2]));
data.PushBack(tmp);
}
}
Cv2.ImShow("data", data);
int numCluster = 4;
Mat labels = new Mat();
var centers = new Mat();
TermCriteria criteria = new TermCriteria(CriteriaTypes.Eps, 10, 0.1); // TermCriteria(TermCriteria::EPS + TermCriteria::COUNT, 10, 0.1);
// kmeans(data, numCluster, labels, criteria, 3, KMEANS_PP_CENTERS);
Cv2.Kmeans(data, numCluster, labels, criteria, 3, KMeansFlags.PpCenters, centers);
// 3. Binarise background vs subject.
Mat mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
// Sample point (2,2) as the background colour.
// NOTE(review): the linear index used below is row * Cols + col, which for
// (2,2) would be src.Cols * 2 + 2 — confirm Rows * 2 + 2 is intended.
int index = src.Rows * 2 + 2;
int cindex = labels.At<int>(index, 0);
/* extract the background cluster into the mask */
for (int row = 0; row < src.Rows; row++)
{
for (int col = 0; col < src.Cols; col++)
{
index = row * src.Cols + col;
int label = labels.At<int>(index, 0);
if (label == cindex)
{ // background
mask.At<byte>(row, col) = 0;
}
else
{
mask.At<byte>(row, col) = 255;
}
}
}
// Soften the mask edge before compositing.
Mat kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3), new Point(-1, -1));
Cv2.Erode(mask, mask, kernel);
Cv2.GaussianBlur(mask, mask, new Size(3, 3), 0, 0);
Cv2.ImShow("Mask", mask);
// Fixed-seed random background colour (drawn in channel order 0, 2, 1).
RNG rng = new RNG(12345);
Vec3b color = new Vec3b();
color[0] = (byte)rng.Uniform(0, 255);
color[2] = (byte)rng.Uniform(0, 255);
color[1] = (byte)rng.Uniform(0, 255);
Mat result = new Mat(src.Size(), src.Type());
double w = 0.0; // blend weight
int b = 0, g = 0, r = 0;
int b1 = 0, g1 = 0, r1 = 0;
int b2 = 0, g2 = 0, r2 = 0;
for (int row = 0; row < src.Rows; row++)
{
for (int col = 0; col < src.Cols; col++)
{
int m = mask.At<byte>(row, col);
if (m == 255)
{
result.At<Vec3b>(row, col) = src.At<Vec3b>(row, col); // foreground
}
else if (m == 0)
{
result.At<Vec3b>(row, col) = color; // background
}
else
{/* edge blending by mask intensity */
w = m / 255.0;
b1 = src.At<Vec3b>(row, col)[0];
g1 = src.At<Vec3b>(row, col)[1];
r1 = src.At<Vec3b>(row, col)[2];
b2 = color[0];
g2 = color[1];
r2 = color[2];
b = (int)(b1 * w + b2 * (1.0 - w));
g =(int) (g1 * w + g2 * (1.0 - w));
r = (int)(r1 * w + r2 * (1.0 - w));
result.At<Vec3b>(row, col)[0] = (byte)b;
result.At<Vec3b>(row, col)[1] = (byte)g;
result.At<Vec3b>(row, col)[2] = (byte)r;
}
}
}
Cv2.ImShow("RESULT ", result);
}
/// <summary>
/// K-means background replacement demo (k = 4): clusters the image colours,
/// treats the cluster of a near-corner sample as background, builds a
/// smoothed binary mask and composites the source over a random colour.
/// BUGFIXES: the blend weight used integer division (temp / 255, always 0 for
/// edge pixels), and the display/WaitKey block sat inside the row loop so it
/// popped a blocking window once per image row.
/// </summary>
private void T1()
{
    Mat dst = new Mat();
    Mat src = Cv2.ImRead("C:\\Users\\E053707\\Pictures\\bing\\a.jpg", ImreadModes.Color);
    Mat data = mat_to_samples(src);
    Cv2.ImShow("data", data);
    int numCluster = 4;
    Mat labels = new Mat();
    var centers = new Mat();
    TermCriteria criteria = new TermCriteria(CriteriaTypes.Eps, 10, 0.1);
    Cv2.Kmeans(data, numCluster, labels, criteria, 3, KMeansFlags.PpCenters, centers);
    //https://github.com/VahidN/OpenCVSharp-Samples/blob/master/OpenCVSharpSample11/Program.cs
    // Binarise background vs subject.
    Mat mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
    // Sample point (2,2) as the background colour.
    // NOTE(review): the linear index used below is row * Cols + col, which for
    // (2,2) would be src.Cols * 2 + 2 — confirm Rows * 2 + 2 is intended.
    int index = src.Rows * 2 + 2;
    int cindex = labels.At<int>(index, 0);
    src.CopyTo(dst);
    /* extract the background cluster into the mask */
    for (int row = 0; row < src.Rows; row++)
    {
        for (int col = 0; col < src.Cols; col++)
        {
            index = row * src.Cols + col;
            int label = labels.At<int>(index, 0);
            if (label == cindex)
            { // background
                mask.At<byte>(row, col) = 0;
            }
            else
            {
                mask.At<byte>(row, col) = 255;
            }
        }
    }
    // Soften the mask edge so the composite blends smoothly.
    Mat kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3), new Point(-1, -1));
    Cv2.Erode(mask, mask, kernel);
    Cv2.GaussianBlur(mask, mask, new Size(3, 3), 0, 0);
    // Random background colour (unseeded RNG; drawn in channel order 0, 2, 1).
    RNG rng = new RNG();
    Vec3b color = new Vec3b();
    color[0] = (byte)rng.Uniform(0, 255);
    color[2] = (byte)rng.Uniform(0, 255);
    color[1] = (byte)rng.Uniform(0, 255);
    dst = Mat.Zeros(src.Size(), src.Type());
    for (int row = 0; row < src.Rows; row++)
    {
        for (int col = 0; col < src.Cols; col++)
        {
            int temp = mask.At<byte>(row, col); // mask value at (row, col)
            if (temp == 255) // pure white: copy the source pixel (foreground)
            {
                dst.At<Vec3b>(row, col) = src.At<Vec3b>(row, col);
            }
            else if (temp == 0) // pure black: paint the random colour (background)
            {
                dst.At<Vec3b>(row, col) = color;
            }
            else // edge pixel: neither pure black nor pure white
            {
                // BUGFIX: was `temp / 255` (integer division -> always 0 for
                // 0 < temp < 255), which disabled blending entirely.
                double weight = temp / 255.0;
                // source pixel channels at (row, col)
                int b_src = src.At<Vec3b>(row, col)[0];
                int g_src = src.At<Vec3b>(row, col)[1];
                int r_src = src.At<Vec3b>(row, col)[2];
                // random background colour channels
                int b_color = color[0];
                int g_color = color[1];
                int r_color = color[2];
                // blend the channels by the mask weight
                dst.At<Vec3b>(row, col)[0] = (byte)(weight * b_src + (1 - weight) * b_color);
                dst.At<Vec3b>(row, col)[1] = (byte)(weight * g_src + (1 - weight) * g_color);
                dst.At<Vec3b>(row, col)[2] = (byte)(weight * r_src + (1 - weight) * r_color);
            }
        }
    }
    // BUGFIX: display moved out of the row loop — it previously ran (and
    // blocked on WaitKey) once per image row.
    Cv2.ImShow("mask", mask);
    using (new Window("dst image", dst))
    {
        Cv2.WaitKey();
    }
}
/// <summary>
/// Reshapes a BGR image into an N x 3 CV_32F sample matrix (one row per
/// pixel, one column per channel) suitable as input for Cv2.Kmeans.
/// </summary>
Mat mat_to_samples(Mat image)
{
    int cols = image.Cols;
    int rows = image.Rows;
    int sampleCount = cols * rows;       // one sample per pixel
    int channelCount = image.Channels();
    // Initial fill value is irrelevant: every cell is overwritten below.
    Mat samples = new Mat(sampleCount, channelCount, MatType.CV_32F, new Scalar(10));

    // Copy each pixel's BGR triple into the corresponding sample row.
    for (int y = 0; y < rows; y++)
    {
        for (int x = 0; x < cols; x++)
        {
            int sampleIndex = y * cols + x; // row-major pixel index
            Vec3b pixel = image.At<Vec3b>(y, x);
            samples.At<float>(sampleIndex, 0) = Convert.ToInt32(pixel[0]);
            samples.At<float>(sampleIndex, 1) = Convert.ToInt32(pixel[1]);
            samples.At<float>(sampleIndex, 2) = Convert.ToInt32(pixel[2]);
        }
    }
    return samples;
}
/// <summary>
/// Detects object contours in <paramref name="srcImage"/> (grayscale + blur
/// + Canny + FindContours) and returns a black canvas with every contour
/// drawn in a random colour.
/// </summary>
Mat MyFindContours(Mat srcImage)
{
    // Convert to grayscale.
    // BUGFIX: Cv2.ImRead produces BGR channel order, so the correct
    // conversion is BGR2GRAY; the original RGB2GRAY applied the red/blue
    // luma weights to swapped channels.
    Mat src_gray = new Mat();
    Cv2.CvtColor(srcImage, src_gray, ColorConversionCodes.BGR2GRAY);
    // Smooth before edge detection to suppress noise.
    Cv2.Blur(src_gray, src_gray, new Size(3, 3));
    // Canny edge detection.
    Mat canny_Image = new Mat();
    Cv2.Canny(src_gray, canny_Image, 100, 200);
    // Extract the full contour hierarchy from the edge map.
    Point[][] contours;
    HierarchyIndex[] hierarchly;
    Cv2.FindContours(canny_Image, out contours, out hierarchly, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple, new Point(0, 0));
    // Draw each contour in a random colour on a black canvas and return it.
    Mat dst_Image = Mat.Zeros(canny_Image.Size(), srcImage.Type());
    Random rnd = new Random();
    for (int i = 0; i < contours.Length; i++)
    {
        Scalar color = new Scalar(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255));
        Cv2.DrawContours(dst_Image, contours, i, color, 2, LineTypes.Link8, hierarchly);
    }
    return dst_Image;
}
/// <summary>
/// Finds the external contours of a binary image and redraws, onto a black
/// canvas of the same size/type, only those whose perimeter exceeds
/// <paramref name="avgLen"/> and whose area exceeds <paramref name="avgarea"/>.
/// </summary>
Mat MyFindEdgeContours(Mat srcImage, int avgLen = 50, int avgarea = 6)
{
    Point[][] contours;
    HierarchyIndex[] hierarchly;
    Cv2.FindContours(srcImage, out contours, out hierarchly, RetrievalModes.External, ContourApproximationModes.ApproxNone, new Point(0, 0));

    Mat canvas = Mat.Zeros(srcImage.Size(), srcImage.Type());
    Scalar white = new Scalar(255, 255, 255);
    for (int idx = 0; idx < contours.Length; idx++)
    {
        double perimeter = Cv2.ArcLength(contours[idx], true);
        double area = Cv2.ContourArea(contours[idx]);
        // Skip contours that are too short or enclose too little area.
        if (perimeter <= avgLen || area <= avgarea)
        {
            continue;
        }
        Cv2.DrawContours(canvas, contours, idx, white, 1, LineTypes.Link4, hierarchly);
    }
    return canvas;
}
/// <summary>
/// Builds a mask of the regions enclosed by the external contours of
/// <paramref name="srcImage"/>: every pixel lying inside or on any contour
/// is set to 128, all other pixels stay 0.
/// NOTE(review): this is O(W*H*contourPoints); for large images a filled
/// Cv2.DrawContours would be far cheaper — kept as-is to preserve behavior.
/// </summary>
Mat CheckContours(Mat srcImage)
{
    Point[][] contours;
    HierarchyIndex[] hierarchly;
    Cv2.FindContours(srcImage, out contours, out hierarchly, RetrievalModes.External, ContourApproximationModes.ApproxNone, new Point(0, 0));

    Mat mask = Mat.Zeros(srcImage.Size(), srcImage.Type());
    for (int x = 0; x < srcImage.Width; x++)
    {
        for (int y = 0; y < srcImage.Height; y++)
        {
            Point2f pt = new Point2f(x, y);
            for (int k = 0; k < contours.Length; k++)
            {
                // measureDist=false: +1 inside, 0 on the edge, -1 outside.
                double res = Cv2.PointPolygonTest(contours[k], pt, false);
                if (res >= 0)
                {
                    // BUGFIX: Mat.At is (row, col) = (y, x); the original
                    // wrote mask.At<byte>(x, y), which transposes the mask
                    // and indexes out of bounds on non-square images.
                    mask.At<byte>(y, x) = 128;
                    break; // pixel classified; no need to test more contours
                }
            }
        }
    }
    return mask;
}
}
}