using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenCvSharp;
using Size = OpenCvSharp.Size;
namespace TargetMeasureObject
{
class BlobProcessor : NormalProcessor
{
/*** Mouse-interaction state shared by ImageRoiMeasurement / MouseDownFunc ***/
static private Mat mouseDeltImage = new Mat(); // committed display image (last accepted state)
static private Mat mouseTempImage = new Mat(); // scratch image redrawn while dragging
static private Rect roiRectangle = new Rect(); // last rubber-band ROI selected by the user
/*** Histogram rendering constants (original "HISTGRAM" spelling kept — referenced throughout) ***/
static private double HISTGRAM_ANNOTATION_FONT = 0.5; // annotation font scale
static private int HISTGRAM_ANNOTATION_INTERVAL = 20; // vertical spacing between annotation lines
static private Scalar HISTGRAM_LINE_N_COLOR = Scalar.Gray; // gray-channel curve color
static private Scalar HISTGRAM_LINE_R_COLOR = Scalar.Red; // red-channel curve color
static private Scalar HISTGRAM_LINE_G_COLOR = Scalar.Green; // green-channel curve color
static private Scalar HISTGRAM_LINE_B_COLOR = Scalar.Blue; // blue-channel curve color
static private int HISTGRAM_LINE_THICK = 2; // curve line thickness (px)
static private Scalar HISTGRAM_BACKGROUND = Scalar.Black; // histogram canvas background
static private double HISTGRAM_PEAK_HEIGHT_RATIO = 0.8; // peak height as fraction of canvas height
static private Size HISTGRAM_SIZE = new Size(512, 300); // histogram canvas size
/*** Raw mouse positions captured by the callback ***/
static private Point MouseDownPoint = new Point(); // where the left button went down
static private Point MouseUpPoint = new Point(); // where the left button came up
static private Point MouseMovePoint = new Point(); // latest cursor position while dragging
static public bool MouseFlags = false; // true while the left button is held
static public bool MouseMove = false; // true once the cursor moved during a press (drag vs. click)
/// <summary>
/// Samples the gray-level profile of a strip centered on <paramref name="position"/>,
/// computes adjacent-pixel differences, applies non-maximum suppression, and keeps the
/// strongest edge candidates. Profile/difference curves and edge annotations are drawn
/// onto a copy of the input image.
/// </summary>
/// <param name="img">Source image (8-bit gray or color).</param>
/// <param name="position">Anchor point of the scanned strip.</param>
/// <param name="orientation">Scan direction: value 0 scans along X at position.Y, otherwise along Y at position.X.</param>
/// <param name="annoImg">Annotated output image.</param>
/// <param name="LineFeature">Per-position pixel features along the scan line.</param>
/// <param name="ValidEdge">Up to <paramref name="selectNum"/> strongest edge candidates, sorted by position.</param>
/// <param name="stripWidth">Width of the averaged strip in pixels (odd value expected).</param>
/// <param name="selectNum">Maximum number of edge candidates to return.</param>
/// <param name="scale">Vertical display scale for the difference curves.</param>
/// <param name="pScale">Display scale for the raw pixel-value curve.</param>
static public void LineGrayMapExtractor(Mat img,Point position,ENUMS.ORIENTATION orientation,out Mat annoImg,out List<PixelFeature> LineFeature,out List<PixelFeature>ValidEdge
,int stripWidth = 5,int selectNum=4,double scale = 1, double pScale = 0.25)
{
    annoImg = new Mat();
    ValidEdge = new List<PixelFeature>();
    Mat _uImg = ImageGrayDetect(img.Clone());
    // GDI bitmap used for per-pixel reads; disposed before returning (was leaked before).
    var _Data = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(_uImg);
    Mat _sImg = img.Clone();
    if (_sImg.Type() == MatType.CV_8UC1)
    {
        Cv2.Merge(new Mat[] { _sImg, _sImg, _sImg }, _sImg);
    }
    int _width = _uImg.Width, _height = _uImg.Height;
    List<int> pixels = new List<int>();
    List<int> diffPosPixel = new List<int>();
    List<int> diffNegPixel = new List<int>();
    List<Point> pixelValCurvePoints = new List<Point>();
    List<Point> posPixelDiffCurvePoints = new List<Point>();
    List<Point> negPixelDiffCurvePoints = new List<Point>();
    List<PixelFeature> PixelsInform = new List<PixelFeature>();
    int k = stripWidth / 2; // half strip width; distance-weighting radius
    int _orien = (int)orientation;
    /* Forward pass: collect distance-weighted strip averages and adjacent differences. */
    for (int i = 0; i < (_orien == 0 ? _width : _height); i++)
    {
        int sumPix = 0;
        int cenPix = 0;
        // Average across the strip, perpendicular to the scan direction.
        for (int j = _orien == 0 ? position.Y - k : position.X - k; j <= ((_orien == 0) ? position.Y + k : position.X + k); j++)
        {
            double distCoe = (double)((k + 1) - Math.Abs(_orien == 0 ? position.Y - j : position.X - j)) / (double)(stripWidth * (k + 1));
            // BUGFIX: the X bound used to be "j <= _width", reading one pixel past the right edge.
            var color = _Data.GetPixel(
                _orien == 0 ? i : (j >= 0 && j < _width) ? j : position.X,
                _orien == 0 ? (j >= 0 && j < _height) ? j : position.Y : i);
            int cpix = (byte)(color.R * .299 + color.G * .587 + color.B * .114);
            cenPix = _orien == 0 ? j == position.Y ? cpix : cenPix : j == position.X ? cpix : cenPix; // pixel at the strip center
            sumPix += (int)((double)cpix * distCoe);
        }
        // Also blend in neighbors along the scan direction (excluding the center itself).
        for (int j = i - k; j <= i + k; j++)
        {
            if (j == i)
            {
                continue;
            }
            double distCoe = (double)((k + 1) - Math.Abs(i - j)) / (double)(stripWidth * (k + 1));
            var color = _Data.GetPixel(
                _orien == 0 ? j >= 0 && j < _width ? j : i : position.X,
                _orien == 0 ? position.Y : j >= 0 && j < _height ? j : i);
            int cpix = (byte)(color.R * .299 + color.G * .587 + color.B * .114);
            sumPix += (int)((double)cpix * distCoe);
        }
        pixels.Add(sumPix);
        pixelValCurvePoints.Add(_orien == 0 ?
            new Point(i, position.Y + (cenPix * pScale) < _height ? position.Y + (cenPix * pScale) : _height - 1) :
            new Point(position.X + cenPix * pScale < _width ? position.X + cenPix * pScale : _width - 1, i));
        int pDiff = i == 0 ? 0 : sumPix - pixels[i - 1]; // difference to the previous sample
        diffPosPixel.Add(pDiff);
        Point diffPt = new Point();
        if (_orien == 0)
        {
            diffPt.Y = position.Y + (pDiff * scale) < 0 ? 1 : (int)(position.Y + (pDiff * scale));
            diffPt.Y = position.Y + (pDiff * scale) > _height ? _height - 1 : (int)(position.Y + (pDiff * scale));
            diffPt.X = i;
        }
        else if (_orien == 1)
        {
            diffPt.X = position.X + (pDiff * scale) < 0 ? 1 : (int)(position.X + (pDiff * scale));
            diffPt.X = position.X + (pDiff * scale) > _width ? _width - 1 : (int)(position.X + (pDiff * scale));
            diffPt.Y = i;
        }
        posPixelDiffCurvePoints.Add(diffPt);
        PixelsInform.Add(new PixelFeature(cenPix, sumPix, _orien == 0 ? new Point(i, position.Y) : new Point(position.X, i), pDiff));
    }
    /* Non-maximum suppression: in each sliding window keep only the strongest difference. */
    int NMSWidth = 19;
    for (int i = NMSWidth / 2; i < PixelsInform.Count - NMSWidth / 2; i++)
    {
        int maxId = -1;
        int maxPix = int.MinValue;
        for (int j = i - NMSWidth / 2; j <= i + NMSWidth / 2; j++)
        {
            if (PixelsInform[j].PosPixDiff == 0) continue;
            if (Math.Abs(PixelsInform[j].PosPixDiff) > maxPix)
            {
                maxPix = Math.Abs(PixelsInform[j].PosPixDiff);
                maxId = j;
            }
        }
        for (int j = i - NMSWidth / 2; j <= i + NMSWidth / 2; j++)
        {
            PixelsInform[j].PosPixDiff = j != maxId ? 0 : PixelsInform[j].PosPixDiff;
            if (PixelsInform[j].PosPixDiff == 0) continue;
            posPixelDiffCurvePoints[j] =
                _orien == 0 ? new Point(PixelsInform[j].Position.X, PixelsInform[j].Position.Y + (double)PixelsInform[j].PosPixDiff * scale)
                : new Point(PixelsInform[j].Position.X + (double)PixelsInform[j].PosPixDiff * scale, PixelsInform[j].Position.Y);
        }
    }
    // Triple-wise suppression: zero any difference not locally maximal among {i-1, i}.
    // (Removed the spurious null-forgiving "!" on Math.Abs(int) — it had no effect.)
    for (int i = 1; i < PixelsInform.Count - 1; i++)
    {
        int maxDiff = Math.Max(Math.Abs(PixelsInform[i - 1].PosPixDiff), Math.Abs(PixelsInform[i].PosPixDiff));
        PixelsInform[i - 1].PosPixDiff = (Math.Abs(PixelsInform[i - 1].PosPixDiff) < maxDiff ? 0 : PixelsInform[i - 1].PosPixDiff);
        PixelsInform[i].PosPixDiff = (Math.Abs(PixelsInform[i].PosPixDiff) < maxDiff ? 0 : PixelsInform[i].PosPixDiff);
        PixelsInform[i + 1].PosPixDiff = (Math.Abs(PixelsInform[i + 1].PosPixDiff) < maxDiff ? 0 : PixelsInform[i + 1].PosPixDiff);
        posPixelDiffCurvePoints[i - 1] = new Point(PixelsInform[i - 1].Position.X,
            (int)(PixelsInform[i - 1].Position.Y + (double)PixelsInform[i - 1].PosPixDiff * scale));
        posPixelDiffCurvePoints[i] = new Point(PixelsInform[i].Position.X,
            (int)(PixelsInform[i].Position.Y + (double)PixelsInform[i].PosPixDiff * scale));
        posPixelDiffCurvePoints[i + 1] = new Point(PixelsInform[i + 1].Position.X,
            (int)(PixelsInform[i + 1].Position.Y + (double)PixelsInform[i + 1].PosPixDiff * scale));
    }
    /* Backward (negative) differences: each point gets minus the next point's forward difference. */
    for (int i = 0; i < PixelsInform.Count; i++)
    {
        int nDiff = i != PixelsInform.Count - 1 ? (-1) * PixelsInform[i + 1].PosPixDiff : 0;
        PixelsInform[i].NegPixDiff = nDiff;
        diffNegPixel.Add(nDiff);
        Point diffPt = new Point();
        if (_orien == 0)
        {
            diffPt.Y = position.Y + (nDiff * scale) < 0 ? 1 : (int)(position.Y + (nDiff * scale));
            diffPt.Y = position.Y + (nDiff * scale) > _height ? _height - 1 : (int)(position.Y + (nDiff * scale));
            diffPt.X = i;
        }
        else
        {
            diffPt.X = position.X + (nDiff * scale) < 0 ? 1 : (int)(position.X + (nDiff * scale));
            diffPt.X = position.X + (nDiff * scale) > _width ? _width - 1 : (int)(position.X + (nDiff * scale));
            diffPt.Y = i;
        }
        negPixelDiffCurvePoints.Add(diffPt);
    }
    LineFeature = PixelsInform;
    /* Edge-candidate selection: keep sign-change points; merge candidates closer than validDist. */
    List<PixelFeature> possiblePoints = new List<PixelFeature>();
    int validDist = 5;
    for (int i = 0; i < PixelsInform.Count; i++)
    {
        if (i == 0 || i == PixelsInform.Count - 1)
        {
            continue;
        }
        double diff1 = PixelsInform[i].PosPixDiff - PixelsInform[i - 1].PosPixDiff;
        double diff2 = PixelsInform[i + 1].PosPixDiff - PixelsInform[i].PosPixDiff;
        if (diff1 * diff2 < 0 || (diff1 * diff2 == 0 && diff1 + diff2 != 0))
        {
            if (possiblePoints.Count != 0)
            {
                int distDiff = _orien == 0 ?
                    PixelsInform[i].Position.X - possiblePoints[possiblePoints.Count - 1].Position.X :
                    PixelsInform[i].Position.Y - possiblePoints[possiblePoints.Count - 1].Position.Y;
                if (distDiff < validDist)
                {
                    // Near-duplicate: keep the stronger response; same-sign pairs merge, opposite-sign pairs both stay.
                    if (Math.Abs(PixelsInform[i].PosPixDiff) >= Math.Abs(possiblePoints[possiblePoints.Count - 1].PosPixDiff))
                    {
                        if ((PixelsInform[i].PosPixDiff) * (possiblePoints[possiblePoints.Count - 1].PosPixDiff) > 0)
                        {
                            possiblePoints[possiblePoints.Count - 1] = PixelsInform[i];
                        }
                        else
                        {
                            possiblePoints.Add(PixelsInform[i]);
                        }
                    }
                }
                else
                {
                    possiblePoints.Add(PixelsInform[i]);
                }
            }
            else
            {
                possiblePoints.Add(PixelsInform[i]);
            }
        }
    }
    // Strongest responses first, then re-sort the kept edges by position for display.
    possiblePoints = possiblePoints.OrderByDescending(pt => Math.Abs(pt.PosPixDiff)).ToList();
    ValidEdge = possiblePoints.Take(selectNum).ToList();
    if (_orien == 0)
    {
        ValidEdge = ValidEdge.OrderBy(pt => pt.Position.X).ToList();
    }
    else
    {
        ValidEdge = ValidEdge.OrderBy(pt => pt.Position.Y).ToList();
    }
    // Annotate each selected edge with its difference value and strip average.
    for (int i = 0; i < ValidEdge.Count; i++)
    {
        Cv2.Circle(_sImg, new Point(
            _orien == 0 ? ValidEdge[i].Position.X : ValidEdge[i].Position.X + ValidEdge[i].PosPixDiff * scale,
            _orien == 0 ? ValidEdge[i].Position.Y + ValidEdge[i].PosPixDiff * scale : ValidEdge[i].Position.Y),
            3, Scalar.Red, 2);
        _sImg = ImageAnnotation(_sImg, ValidEdge[i].PosPixDiff.ToString(), new Point(
            _orien == 0 ? ValidEdge[i].Position.X : ValidEdge[i].Position.X + ValidEdge[i].PosPixDiff * scale,
            _orien == 0 ? ValidEdge[i].Position.Y + ValidEdge[i].PosPixDiff * scale : ValidEdge[i].Position.Y),
            Scalar.Green, 0.5);
        _sImg = ImageAnnotation(_sImg, possiblePoints[i].AvrPixel.ToString(), new Point(
            _orien == 0 ? ValidEdge[i].Position.X : ValidEdge[i].Position.X + ValidEdge[i].NegPixDiff * scale,
            _orien == 0 ? ValidEdge[i].Position.Y + ValidEdge[i].NegPixDiff * scale : ValidEdge[i].Position.Y),
            Scalar.Red);
    }
    // NOTE: two large blocks of commented-out drawing code (edge-spacing display and
    // multi-order difference curves) were removed here; one of them contained a stray
    // uncommented text line that did not compile.
    // Draw baselines, the positive-difference curve, and the raw pixel-value curve.
    _sImg = ImageBasicLineDrawing(_sImg, position, ENUMS.IMAGE_PERMUTATION_TYPE.VERTICAL);
    _sImg = ImageBasicLineDrawing(_sImg, position, ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL);
    _sImg = ImageCurveDrawing(_sImg, posPixelDiffCurvePoints.ToArray(), Scalar.Blue, 1);
    _sImg = ImageCurveDrawing(_sImg, pixelValCurvePoints.ToArray(), Scalar.Gray, 1);
    _Data.Dispose(); // BUGFIX: release the GDI bitmap (was leaked)
    annoImg = _sImg.Clone();
}
/// <summary>
/// Mouse callback for the "ImageRoiMeasurement" window: drag to rubber-band a ROI,
/// then click (without dragging) to run LineGrayMapExtractor on the selected region.
/// Uses the class-level MouseFlags/MouseMove state to distinguish clicks from drags.
/// </summary>
/// <param name="event">OpenCV mouse event type.</param>
/// <param name="x">Cursor x in window coordinates.</param>
/// <param name="y">Cursor y in window coordinates.</param>
/// <param name="flags">Modifier/button flags (unused).</param>
/// <param name="Data">User data pointer (unused).</param>
static private void MouseDownFunc(MouseEventTypes @event, int x, int y, MouseEventFlags flags, IntPtr Data)
{
Rect roiRect = new Rect();
if (@event == MouseEventTypes.LButtonDown)
{
// Begin a potential drag; MouseMove later decides whether it was a click or a drag.
MouseDownPoint = new Point(x, y);
MouseFlags = true;
}
else if (@event == MouseEventTypes.LButtonUp )
{
MouseUpPoint = new Point(x, y);
if (MouseFlags && !MouseMove)
{
// Plain click (no drag): analyze the previously selected ROI, if any.
MouseUpPoint = new Point(x, y);
MouseFlags = false;
switch (WINDOW_NAME)
{
case "ImageRoiMeasurement":
{
if (roiRectangle.Width * roiRectangle.Height != 0)
{
mouseTempImage = new Mat(mouseDeltImage, roiRectangle);
// Scan horizontally when the ROI is wider than tall, vertically otherwise;
// the click position is translated into ROI-local coordinates.
LineGrayMapExtractor(mouseTempImage,
new Point(MouseDownPoint.X - roiRectangle.X, MouseDownPoint.Y - roiRectangle.Y),
roiRectangle.Width > roiRectangle.Height ? ENUMS.ORIENTATION.HORIZONTAL : ENUMS.ORIENTATION.VERTICAL,
out Mat temp, out List<PixelFeature> LineFeatures, out List<PixelFeature> validEdge, 7, 2, scale: 0.5);
mouseDeltImage[roiRectangle] = temp;
Cv2.ImShow(WINDOW_NAME, mouseDeltImage);
}
break;
}
}
}
else if (MouseFlags && MouseMove)
{
// Drag finished: commit the rubber-band preview as the new display image.
MouseUpPoint = new Point(x, y);
MouseMovePoint = new Point(x, y);
MouseFlags = false;
MouseMove = false;
switch (WINDOW_NAME)
{
case "ImageRoiMeasurement":
{
mouseDeltImage = mouseTempImage.Clone();
Cv2.ImShow("ImageRoiMeasurement", mouseDeltImage);
break;
}
}
}
}
else if (@event == MouseEventTypes.MouseMove && MouseFlags)
{
// Dragging: redraw the rubber-band rectangle onto a scratch copy each move.
MouseMovePoint = new Point(x, y);
MouseMove = true;
switch (WINDOW_NAME)
{
case "ImageRoiMeasurement":
{
mouseTempImage = mouseDeltImage.Clone();
roiRect = new Rect(
new Point(Math.Min(MouseDownPoint.X, MouseMovePoint.X),
Math.Min(MouseDownPoint.Y, MouseMovePoint.Y)),
new Size(Math.Abs(MouseDownPoint.X - MouseMovePoint.X),
Math.Abs(MouseDownPoint.Y - MouseMovePoint.Y)));
if (mouseTempImage.Type() != MatType.CV_8UC3)
{
Cv2.Merge(new Mat[] { mouseTempImage, mouseTempImage, mouseTempImage }, mouseTempImage);
}
// Marker is drawn 2 px outside the ROI so the selected pixels stay clean.
Cv2.Rectangle(mouseTempImage, new Rect(roiRect.X-2, roiRect.Y-2,roiRect.Width+4,roiRect.Height+4), Scalar.Green, 1);
roiRectangle = roiRect;
Cv2.ImShow("ImageRoiMeasurement", mouseTempImage);
break;
}
}
}
return;
}
/// <summary>
/// Opens an interactive window on a clone of <paramref name="img"/>. Mouse drags select
/// a ROI and a subsequent click analyzes it (see MouseDownFunc). Blocks until a key is pressed.
/// </summary>
/// <param name="img">Image to measure; the caller's Mat is not modified.</param>
static public void ImageRoiMeasurement(Mat img)
{
mouseDeltImage = img.Clone();
WINDOW_NAME = "ImageRoiMeasurement";
Cv2.NamedWindow(WINDOW_NAME, WindowFlags.AutoSize);
Cv2.ImShow(WINDOW_NAME, mouseDeltImage);
Cv2.SetMouseCallback(WINDOW_NAME, MouseDownFunc);
Cv2.WaitKey();
}
/// <summary>
/// Computes histogram data for an image. For a single-channel image one histogram is
/// produced; for a 3-channel image four are produced: slot 0 from the gray-converted
/// image, slots 1..3 from channels B/G/R. Peak bin and peak ratio are recorded per slot.
/// </summary>
/// <param name="img">Source image (8-bit, 1 or 3 channels).</param>
/// <param name="show">True to display the merged histogram via ImageHistgramDraw.</param>
/// <returns>ImageBlobData holding raw histograms, normalized histograms, peak pixel values and peak ratios.</returns>
static public ImageBlobData ImageHisgramDataGet(Mat img, bool show = false)
{
    Mat _uImg = img.Clone();
    // One slot per channel, plus a leading gray slot for multi-channel images.
    // (Removed an unused local "a" that duplicated Channels().)
    int num = _uImg.Channels() != 1 ? _uImg.Channels() + 1 : _uImg.Channels();
    Mat[] _outHist = new Mat[num];
    Mat[] _normHist = new Mat[num];
    int[] _peakPixel = new int[num];
    double[] _peakRatio = new double[num];
    /*** Collect a histogram for every slot ***/
    for (int i = 0; i < num; i++)
    {
        // Channel to sample: slot 0 reads channel 0, slot i>0 reads channel i-1.
        // (The original "_hId == i ? _hId : _hId + 1" array indices always evaluated to i.)
        int _hId = i == 0 ? 0 : i - 1;
        // Slot 0 of a color image is computed from the gray-converted image.
        Mat _hImg = (_uImg.Channels() == 3 && i == 0) ? ImageGrayDetect(_uImg) : _uImg.Clone();
        _outHist[i] = new Mat();
        _normHist[i] = new Mat();
        Cv2.CalcHist(new Mat[] { _hImg }, new int[] { _hId }, new Mat(), _outHist[i], 1, new int[] { 256 }, new Rangef[] { new Rangef(0, 256) });
        Cv2.Normalize(_outHist[i], _normHist[i]);
        // Peak location's Y index is the bin (pixel value) with the highest ratio.
        Cv2.MinMaxLoc(_normHist[i], out double minR, out double maxR, out Point minL, out Point maxL);
        _peakPixel[i] = maxL.Y;
        _peakRatio[i] = maxR;
    }
    ImageBlobData _outData = new ImageBlobData(_outHist, _normHist, _peakPixel, _peakRatio);
    ImageHistgramDraw(_outData, show);
    return _outData;
}
/// <summary>
/// Draws one histogram curve image per slot in <paramref name="blobData"/> plus a merged
/// overlay of all curves; both are stored back onto the blob data.
/// Slot colors: 0 = gray (N), 1 = blue, 2 = green, 3 = red.
/// </summary>
/// <param name="blobData">Histogram data produced by ImageHisgramDataGet.</param>
/// <param name="show">True to display the merged histogram window.</param>
/// <returns>The individual curve images (also stored in blobData.HistgramCurveImage).</returns>
static public Mat[] ImageHistgramDraw(ImageBlobData blobData, bool show = false)
{
    int num = blobData.HistgramNormalizedDatas.Length;
    int histHeight = HISTGRAM_SIZE.Height;
    int histWidth = HISTGRAM_SIZE.Width;
    Mat histMerge = new Mat(HISTGRAM_SIZE, MatType.CV_8UC3, Scalar.Black);
    Mat[] _outHistImgs = new Mat[num];
    double _xscale = (double)histWidth / 256.0;
    double _yscale = histHeight * HISTGRAM_PEAK_HEIGHT_RATIO / 100;
    for (int i = 0; i < num; i++)
    {
        /*** Collect histogram bins into curve points (heights normalized to the slot's peak ratio) ***/
        _outHistImgs[i] = new Mat(HISTGRAM_SIZE, MatType.CV_8UC3, HISTGRAM_BACKGROUND);
        Mat _hist = blobData.HistgramNormalizedDatas[i];
        Point[] linePoints = new Point[256];
        for (int k = 0; k < 256; k++)
        {
            Point pt = new Point();
            float _ratio = _hist.Get<float>(k);
            pt.X = (int)((double)k * (double)_xscale);
            pt.Y = HISTGRAM_SIZE.Height - (int)(_ratio / (double)blobData.HistgramPeakRatio[i] * 100 * (double)_yscale);
            linePoints[k] = pt;
        }
        /*** Draw the curve in the slot color and annotate its peak data ***/
        Scalar color;
        if (i == 0)
        {
            color = HISTGRAM_LINE_N_COLOR;
        }
        else if (i == 1)
        {
            color = HISTGRAM_LINE_B_COLOR;
        }
        else if (i == 2)
        {
            color = HISTGRAM_LINE_G_COLOR;
        }
        else
        {
            color = HISTGRAM_LINE_R_COLOR;
        }
        _outHistImgs[i] = ImageCurveDrawing(_outHistImgs[i], linePoints, color, HISTGRAM_LINE_THICK, false);
        Cv2.BitwiseOr(histMerge, _outHistImgs[i], histMerge);
        // FIX: split what was two statements jammed onto one line.
        string[] annotations = new string[] {
            "PeakPixel: "+blobData.HistgramPeakPixel[i].ToString(),
            "PeakPixelRatio: "+blobData.HistgramPeakRatio[i].ToString("0.00")};
        _outHistImgs[i] = ImageAnnotation(_outHistImgs[i], annotations, fontSize: HISTGRAM_ANNOTATION_FONT, fontInterval: HISTGRAM_ANNOTATION_INTERVAL);
    }
    // FIX: dropped the pointless "new string(...)" wrappers (redundant allocations).
    string[] histMergeAnnotation = num != 1 ? new string[] {
        "NPeak: "+blobData.HistgramPeakPixel[0].ToString(),
        "BPeak: "+blobData.HistgramPeakPixel[1].ToString(),
        "GPeak: "+blobData.HistgramPeakPixel[2].ToString(),
        "RPeak: "+blobData.HistgramPeakPixel[3].ToString()
    } : new string[]
    {
        "NPeak: "+blobData.HistgramPeakPixel[0].ToString()
    };
    histMerge = ImageAnnotation(histMerge, histMergeAnnotation
        , HISTGRAM_ANNOTATION_FONT, HISTGRAM_ANNOTATION_INTERVAL);
    blobData.HistgramCurveMergeImage = histMerge.Clone();
    blobData.HistgramCurveImage = _outHistImgs;
    if (show)
    {
        ImageShow("ImageHistgramDraw", histMerge);
    }
    return _outHistImgs;
}
/// <summary>
/// Renders a single histogram curve image from raw histogram data (a 256-bin float Mat),
/// annotated with the peak bin and peak ratio.
/// </summary>
/// <param name="histData">Histogram Mat (256 float bins).</param>
/// <param name="show">True to display the rendered curve.</param>
/// <returns>The rendered histogram image.</returns>
static public Mat ImageHistgramDraw(Mat histData, bool show = false)
{
    Mat histogram = histData.Clone();
    // The max location's Y index is the peak bin; the max value is its (normalized) ratio.
    Cv2.MinMaxLoc(histogram, out double lowVal, out double peakRatio, out Point lowLoc, out Point peakLoc);
    double peakPixel = peakLoc.Y;
    double xStep = (double)HISTGRAM_SIZE.Width / 256.0;
    double yStep = HISTGRAM_SIZE.Height * HISTGRAM_PEAK_HEIGHT_RATIO / 100;
    /*** Convert bins into curve points, scaled so the peak fills the configured ratio of the canvas ***/
    Mat canvas = new Mat(HISTGRAM_SIZE, MatType.CV_8UC3, HISTGRAM_BACKGROUND);
    Point[] curve = new Point[256];
    for (int bin = 0; bin < 256; bin++)
    {
        float ratio = histogram.Get<float>(bin);
        curve[bin] = new Point(
            (int)((double)bin * xStep),
            HISTGRAM_SIZE.Height - (int)(ratio / peakRatio * 100 * yStep));
    }
    /*** Draw the curve and annotate the peak data ***/
    canvas = ImageCurveDrawing(canvas, curve, HISTGRAM_LINE_N_COLOR, HISTGRAM_LINE_THICK, false);
    string[] notes = new string[] {
        "PeakPixel: "+peakPixel.ToString(),
        "PeakPixelRatio: "+peakRatio.ToString("0.00")};
    canvas = ImageAnnotation(canvas, notes, fontSize: HISTGRAM_ANNOTATION_FONT, fontInterval: HISTGRAM_ANNOTATION_INTERVAL);
    if (show)
    {
        ImageShow("ImageHistgramDraw", canvas);
    }
    return canvas;
}
/// <summary>
/// Removes the image background via blur + binary threshold + morphology, storing the
/// intermediate images (source, gray foreground, threshold mask, color foreground)
/// on <paramref name="bbData"/>.
/// </summary>
/// <param name="img">Source image (gray or color); may be null.</param>
/// <param name="bbData">Blob-data container that receives the intermediate results.</param>
/// <param name="blurSize">Box-blur kernel size applied before thresholding.</param>
/// <param name="thresh">Binary threshold value.</param>
/// <param name="show">True to display the resulting gray foreground.</param>
/// <returns>Gray foreground image with the background set to black, or null when img is null.</returns>
static public Mat ImageBackgroundClear(Mat img,ref ImageBlobData bbData,int blurSize=3,double thresh=100, bool show = true)
{
    if (img == null)
    {
        ImageWarningShow("The image is Null");
        return null;
    }
    Mat _uImg = img.Clone();
    bbData.ConnectedFieldDatas.Source = _uImg.Clone();
    _uImg = ImageGrayDetect(_uImg);
    bbData.ConnectedFieldDatas.NoneBackgroudGrayImage = _uImg.Clone();
    Mat nonBackImg = new Mat();
    Cv2.Blur(_uImg, _uImg, new Size(blurSize, blurSize));
    Cv2.Threshold(_uImg, _uImg, thresh, 255, ThresholdTypes.Binary);
    // Dilate then open: consolidate foreground regions and drop small noise specks.
    Mat H = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));
    Cv2.MorphologyEx(_uImg, _uImg, MorphTypes.Dilate, H, iterations: 2);
    Cv2.MorphologyEx(_uImg, _uImg, MorphTypes.Open, H, iterations: 2);
    bbData.ConnectedFieldDatas.ThresholdImage = _uImg.Clone();
    // Mask the gray image with the binary mask to obtain the gray foreground.
    Cv2.BitwiseAnd(_uImg, bbData.ConnectedFieldDatas.NoneBackgroudGrayImage, nonBackImg);
    bbData.ConnectedFieldDatas.NoneBackgroudGrayImage = nonBackImg.Clone();
    // For color sources, expand the mask to 3 channels before masking the original.
    if (img.Channels() != 1)
    {
        Cv2.Merge(new Mat[] { _uImg, _uImg, _uImg }, _uImg);
    }
    Cv2.BitwiseAnd(_uImg, img, _uImg);
    bbData.ConnectedFieldDatas.NoneBackgroudSourImage = _uImg.Clone(); // FIX: removed a stray empty statement (";;")
    if (show)
    {
        ImageShow("NoneBackGroundImageShowing", nonBackImg);
    }
    return nonBackImg;
}
/// <summary>
/// Segments the image into connected components, storing the label map, stats,
/// centroids and an annotated color visualization on <paramref name="bbData"/>.
/// Reuses bbData's ThresholdImage when it is already populated; otherwise runs
/// ImageBackgroundClear first.
/// </summary>
/// <param name="img">Image to segment.</param>
/// <param name="bbData">Blob-data container that receives all results.</param>
/// <param name="threshValue">Threshold passed to ImageBackgroundClear when no ThresholdImage exists.</param>
/// <param name="show">True to display the colored component image.</param>
static public void ImageConnectedFieldSegment(Mat img, ref ImageBlobData bbData, double threshValue = 125, bool show = true)
{
Mat _uImg = img.Clone();
bbData.ConnectedFieldDatas.Source = img.Clone();
if(bbData.ConnectedFieldDatas.ThresholdImage is null)
{
// Parameters that empirically work well for background removal.
ImageBackgroundClear(_uImg, ref bbData, blurSize: 3, threshValue, show: true);
}
else
{
_uImg = bbData.ConnectedFieldDatas.ThresholdImage;
}
bbData.ConnectedFieldDatas.FieldNum = Cv2.ConnectedComponentsWithStats(
_uImg, bbData.ConnectedFieldDatas.Labels,
bbData.ConnectedFieldDatas.Stats,
bbData.ConnectedFieldDatas.Centroids);
// Label 0 is the background, so fewer than 2 labels means no component was found.
if (bbData.ConnectedFieldDatas.FieldNum < 2)
{
ImageWarningShow("SegementErr:Elements Number Not Enough");
return;
}
Mat mask = new Mat(_uImg.Rows, _uImg.Cols, MatType.CV_8UC3, Scalar.Black);
// Mask each connected component with a random color for separate display.
bbData.ConnectedFieldDatas.ConnectedFieldRects();
bbData.ConnectedFieldDatas.SegmentedFieldContourInformGet();
bbData.ConnectedFieldDatas.SegmentedSubRegionImageGet();
for (int i = 1; i < bbData.ConnectedFieldDatas.FieldNum; i++)
{
mask[bbData.ConnectedFieldDatas.FieldRects[i - 1]].SetTo(Scalar.RandomColor());
}
_uImg.ConvertTo(_uImg, MatType.CV_8UC3, 3);
Mat _outImg = new Mat();
Cv2.Merge(new Mat[] { _uImg, _uImg, _uImg }, _outImg);
Cv2.BitwiseAnd(_outImg, mask, _outImg);
// Annotate each component with its area (stats column 4) at its centroid.
List<List<string>> fieldAreas = new List<List<string>>();
List<Point> fieldCenters = new List<Point>();
for (int i = 1; i < bbData.ConnectedFieldDatas.FieldNum; i++)
{
fieldAreas.Add(new List<string>() { bbData.ConnectedFieldDatas.Stats.At<int>(i, 4).ToString() });
fieldCenters.Add(new Point(bbData.ConnectedFieldDatas.Centroids.At<double>(i, 0), bbData.ConnectedFieldDatas.Centroids.At<double>(i, 1)));
}
_outImg = ImageAnnotation(ImageAnnotation(_outImg, fieldAreas, fieldCenters),
new string[] { "Connected Fields Number:" + (bbData.ConnectedFieldDatas.FieldNum - 1).ToString() });
bbData.ConnectedFieldDatas.ConnectedFieldAppearImage = _outImg.Clone();
if (show) ImageShow("ConnectedFiledImage", _outImg);
}
/// <summary>
/// Thresholds a preprocessed image and extracts connected-component information into
/// a fresh ImageBlobData (labels, stats, centroids, per-component sub-images, and an
/// annotated color visualization).
/// </summary>
/// <param name="img">Preprocessed image to threshold and segment.</param>
/// <param name="threshValue">Binary threshold value.</param>
/// <param name="binInverse">True to use an inverse binary threshold.</param>
/// <param name="show">True to display the colored component image.</param>
/// <returns>Populated ImageBlobData, or null when fewer than one component is found.</returns>
static public ImageBlobData ImageConnectedFieldSegment(Mat img, double threshValue = 125,bool binInverse=false, bool show = true)
{
ImageBlobData bbData = new ImageBlobData();
Mat _uImg = img.Clone();
bbData.ConnectedFieldDatas.Source = img.Clone();
Cv2.GaussianBlur(_uImg, _uImg, new Size(3, 3), 3);
Cv2.Threshold(_uImg, _uImg, threshValue, 255, binInverse? ThresholdTypes.BinaryInv: ThresholdTypes.Binary);
// The same thresholded image serves as mask, gray foreground, and source foreground here.
bbData.ConnectedFieldDatas.ThresholdImage = _uImg;
bbData.ConnectedFieldDatas.NoneBackgroudGrayImage = _uImg;
bbData.ConnectedFieldDatas.NoneBackgroudSourImage = _uImg;
_uImg = bbData.ConnectedFieldDatas.ThresholdImage;
bbData.ConnectedFieldDatas.FieldNum = Cv2.ConnectedComponentsWithStats(
_uImg, bbData.ConnectedFieldDatas.Labels,
bbData.ConnectedFieldDatas.Stats,
bbData.ConnectedFieldDatas.Centroids);
// Label 0 is the background, so fewer than 2 labels means no component was found.
if (bbData.ConnectedFieldDatas.FieldNum < 2)
{
ImageWarningShow("SegementErr:Elements Number Not Enough");
return null;
}
Mat mask = new Mat(_uImg.Rows, _uImg.Cols, MatType.CV_8UC3, Scalar.Black);
// Mask each connected component with a random color for separate display.
bbData.ConnectedFieldDatas.ConnectedFieldRects();
bbData.ConnectedFieldDatas.SegmentedFieldContourInformGet();
bbData.ConnectedFieldDatas.SegmentedSubRegionImageGet();
for (int i = 1; i < bbData.ConnectedFieldDatas.FieldNum; i++)
{
mask[bbData.ConnectedFieldDatas.FieldRects[i - 1]].SetTo(Scalar.RandomColor());
}
_uImg.ConvertTo(_uImg, MatType.CV_8UC3, 3);
Mat _outImg = new Mat();
Cv2.Merge(new Mat[] { _uImg, _uImg, _uImg }, _outImg);
Cv2.BitwiseAnd(_outImg, mask, _outImg);
// Annotate each component with its area (stats column 4) at its centroid.
List<List<string>> fieldAreas = new List<List<string>>();
List<Point> fieldCenters = new List<Point>();
for (int i = 1; i < bbData.ConnectedFieldDatas.FieldNum; i++)
{
fieldAreas.Add(new List<string>() { bbData.ConnectedFieldDatas.Stats.At<int>(i, 4).ToString() });
fieldCenters.Add(new Point(bbData.ConnectedFieldDatas.Centroids.At<double>(i, 0), bbData.ConnectedFieldDatas.Centroids.At<double>(i, 1)));
}
_outImg = ImageAnnotation(ImageAnnotation(_outImg, fieldAreas, fieldCenters),
new string[] { "Connected Fields Number:" + (bbData.ConnectedFieldDatas.FieldNum - 1).ToString() });
bbData.ConnectedFieldDatas.ConnectedFieldAppearImage = _outImg.Clone();
if (show) ImageShow("ConnectedFiledImage", _outImg);
return bbData;
}
/// <summary>
/// Compares the histograms of two images, or of two precomputed single-column histogram
/// Mats. Full images are geometry-aligned (rotated when elongated contours disagree in
/// angle) and stretched to a common size before their histograms are taken.
/// </summary>
/// <param name="pattern">Pattern image, or a 1-column histogram Mat.</param>
/// <param name="train">Image under test, or a 1-column histogram Mat.</param>
/// <param name="compareType">Cast to HistCompMethods (default 3 = Bhattacharyya).</param>
/// <param name="show">True to display both histograms and the match score.</param>
/// <returns>Histogram comparison score, or -1 when the channel counts differ.</returns>
static public double ImageHistgramMatcher(Mat pattern, Mat train, int compareType=3,bool show=false)
{
    Mat _org = null;
    if (pattern.Channels() != train.Channels())
    {
        ImageWarningShow("ImageHistgramMatcher: Error : Channels not same");
        return -1;
    }
    Mat histP = new Mat();
    Mat histT = new Mat();
    // A 1-column Mat is treated as an already-computed histogram; anything else is an image.
    if (pattern.Cols != 1)
    {
        Mat _pImg = pattern.Clone();
        Mat _tImg = train.Clone();
        var geo1 = GeometricProcessor.GeometricInformGet(_pImg, show: true);
        var geo2 = GeometricProcessor.GeometricInformGet(_tImg, show: true);
        if (geo1.ContourMinAreaRectWHRation < 0.8 &&
            geo2.ContourMinAreaRectWHRation < 0.8
            && Math.Abs(geo1.ContourWidthOriention - geo2.ContourWidthOriention) > 5)
        {
            /* Elongated contours whose angles disagree: rotate the test image to match the pattern. */
            _tImg = GeometricProcessor.ContourWidthOrientionRotation(geo1, geo2, _tImg, show: false);
            ImageBlobData tempbbData = ImageConnectedFieldSegment(_tImg, show: false);
            // FIX: guard against the null return of ImageConnectedFieldSegment (no components found),
            // which previously caused a NullReferenceException.
            if (tempbbData != null)
            {
                _tImg = new Mat(_tImg, tempbbData.ConnectedFieldDatas.FieldRects[0]);
            }
        }
        _org = ImageCoupleStretch(ref _pImg, ref _tImg, origin: _pImg.Size() != _tImg.Size());
        histP = ImageHisgramDataGet(_pImg).HistgramNormalizedDatas[0];
        histT = ImageHisgramDataGet(_tImg).HistgramNormalizedDatas[0];
    }
    else
    {
        histP = pattern.Clone();
        histT = train.Clone();
    }
    double matchOut = Cv2.CompareHist(histP, histT, (HistCompMethods)compareType);
    if (show)
    {
        Mat _s = ImagesMerge(new Mat[] { ImageHistgramDraw(histP) }, ImageHistgramDraw(histT));
        // FIX: the old condition also tested "_org != new Mat()", which is a reference
        // comparison against a fresh object — always true and a wasted allocation.
        ImageShow("ImageHistgramMatcher", _org != null
            ? ImagesMerge(new Mat[] { _org }, ImageAnnotation(_s, new string[] { "", "", "MatchResult:" + matchOut.ToString("0.000") }), placeOiren: ENUMS.PLACE_ORIENTATION.UP)
            : ImageAnnotation(_s, new string[] { "", "", "MatchResult:" + matchOut.ToString("0.000") }));
    }
    return matchOut;
}
/// <summary>
/// Groups images by histogram similarity. Each group is seeded by the first remaining
/// image and absorbs every image whose histogram-match score against the seed is within
/// <paramref name="diffThresh"/>; groups are sorted by image area, largest first.
/// </summary>
/// <param name="imgs">Images to classify (cloned; originals untouched).</param>
/// <param name="diffThresh">Maximum match score for group membership.</param>
/// <param name="show">True to display the grouped images.</param>
/// <returns>A List&lt;List&lt;Mat&gt;&gt; of groups, boxed as object.</returns>
static public object ImagesHistgramClassified(Mat[] imgs,double diffThresh=0.3,bool show=false)
{
    // Work on clones so grouping never mutates the caller's Mats.
    List<Mat> pending = imgs.Select(m => m.Clone()).ToList();
    List<List<Mat>> groups = new List<List<Mat>>();
    while (pending.Count > 0)
    {
        Mat seed = pending[0];
        pending.RemoveAt(0);
        List<Mat> bucket = new List<Mat> { seed };
        // Move every image whose histogram matches the seed into the current bucket.
        pending.RemoveAll(candidate =>
        {
            if (ImageHistgramMatcher(seed, candidate, show: true) <= diffThresh)
            {
                bucket.Add(candidate);
                return true;
            }
            return false;
        });
        groups.Add(bucket.OrderByDescending(pic => pic.Width * pic.Height).ToList());
    }
    if (show)
    {
        // Each group becomes one horizontal row; rows are stacked vertically.
        Mat canvas = new Mat();
        foreach (List<Mat> bucket in groups)
        {
            Mat row = ImagesMerge(bucket.ToArray(), null, ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL, ENUMS.PLACE_ORIENTATION.LEFT);
            canvas = ImagesMerge(new Mat[] { row }, canvas, placeOiren: ENUMS.PLACE_ORIENTATION.DOWN);
        }
        ImageShow("ImagesHistgramClassified", canvas);
    }
    return groups;
}
/// <summary>
/// Groups (index, image) pairs by histogram similarity. Each group is seeded by the first
/// remaining pair and absorbs every pair whose histogram-match score against the seed is
/// within <paramref name="diffThresh"/>; groups are sorted by image area, largest first.
/// </summary>
/// <param name="pairs">Pairs of (original index, Mat image); images are cloned.</param>
/// <param name="diffThresh">Maximum match score for group membership.</param>
/// <param name="show">True to display the grouped images.</param>
/// <returns>A List&lt;List&lt;Pair&gt;&gt; of groups, boxed as object.</returns>
static public object ImagesHistgramClassified(List<Pair> pairs, double diffThresh = 0.3, bool show = false)
{
    // Clone the images so grouping never mutates the caller's data.
    List<Pair> pending = new List<Pair>();
    foreach (Pair p in pairs)
    {
        pending.Add(new Pair(p.M1, ((Mat)p.M2).Clone()));
    }
    List<List<Pair>> groups = new List<List<Pair>>();
    while (pending.Count > 0)
    {
        Pair seed = pending[0];
        pending.RemoveAt(0);
        List<Pair> bucket = new List<Pair> { seed };
        // Move every pair whose histogram matches the seed into the current bucket.
        pending.RemoveAll(candidate =>
        {
            if (ImageHistgramMatcher((Mat)seed.M2, (Mat)candidate.M2, show: true) <= diffThresh)
            {
                bucket.Add(candidate);
                return true;
            }
            return false;
        });
        groups.Add(bucket.OrderByDescending(pic => ((Mat)pic.M2).Width * ((Mat)pic.M2).Height).ToList());
    }
    if (show)
    {
        // Each group becomes one horizontal row; rows are stacked vertically.
        Mat canvas = new Mat();
        foreach (List<Pair> bucket in groups)
        {
            Mat row = ImagesMerge(PairsMatQueues(bucket), null, ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL, ENUMS.PLACE_ORIENTATION.LEFT);
            canvas = ImagesMerge(new Mat[] { row }, canvas, placeOiren: ENUMS.PLACE_ORIENTATION.DOWN);
        }
        ImageShow("ImagesHistgramClassified", canvas);
    }
    return groups;
}
/// <summary>
/// Histogram matcher for images with a simple background: removes backgrounds, segments
/// both images into connected components, groups the pattern's sub-images by histogram
/// similarity, then matches every detected sub-region of the test image against the
/// group representatives.
/// </summary>
/// <param name="_pImg">Pattern image.</param>
/// <param name="_tImg">Test image.</param>
/// <param name="thresh">Threshold used for background removal and segmentation.</param>
/// <param name="classfyThresh">Score threshold for grouping pattern sub-images.</param>
/// <param name="matchThresh">Score threshold for accepting a match.</param>
/// <param name="show">True to display the annotated match result.</param>
/// <returns>One ImageHistMatchResult per accepted match.</returns>
static public List<ImageHistMatchResult> SimpleBackgroundHistMatcher(Mat _pImg,Mat _tImg, double thresh=100,double classfyThresh=0.27, double matchThresh = 0.3, bool show=false)
{
List<ImageHistMatchResult> result = new List<ImageHistMatchResult>();
Mat patternImg = _pImg.Clone();
Mat detectImg = _tImg.Clone();
Mat _sImg = detectImg.Clone() ;
if (_sImg.Type() == MatType.CV_8UC1)
{
Cv2.Merge(new Mat[] { _sImg, _sImg, _sImg }, _sImg);
}
ImageBlobData patBbData = new ImageBlobData();
ImageBlobData detBbData = new ImageBlobData();
/*** Get histogram data — preparation for background removal ***/
patBbData = ImageHisgramDataGet(patternImg);
/*** Remove the image background using the histogram features ***/
ImageBackgroundClear(patternImg, ref patBbData, blurSize: 5, thresh, true);
detBbData = ImageHisgramDataGet(detectImg);
ImageBackgroundClear(detectImg, ref detBbData, blurSize: 5, thresh,true);
/*** Collect connected-component information — preparation for segmentation ***/
ImageConnectedFieldSegment(detectImg, ref detBbData, thresh, show: true);
ImageConnectedFieldSegment(patternImg, ref patBbData, thresh, show: true);
/*** Group the pattern's sub-images to reduce the number of comparisons ***/
var classPatGroup = (List<List<Pair>>)ImagesHistgramClassified(
patBbData.ConnectedFieldDatas.SegmentedSubRegionImage,
classfyThresh,show:true);
/*** Generate one random color per pattern group for later display ***/
Dictionary<int, Scalar> ColorMatch = MatchColorMaker(classPatGroup.Count);
for (int i = 0; i < detBbData.ConnectedFieldDatas.SegmentedSubRegionImage.Count; i++)
{
Mat _det = ((Mat)detBbData.ConnectedFieldDatas.SegmentedSubRegionImage[i].M2).Clone();
Rect _detRect = detBbData.ConnectedFieldDatas.FieldRects[i];
Rect _patRect = new Rect();
ImageHistMatchResult _res = new ImageHistMatchResult();
double bestScore = double.MaxValue;
int bestID = -1;
// Lower score = better match; track the best group representative.
for (int j = 0; j < classPatGroup.Count; j++)
{
Mat _pat = ((Mat)classPatGroup[j][0].M2).Clone();
/*** Histogram-match against each group's representative ***/
double score = ImageHistgramMatcher(_pat, _det,show:true);
if (score < bestScore)
{
bestScore = score;
bestID = j;
}
}
/*** Accept only matches whose best score meets the configured threshold ***/
if (bestScore< matchThresh)
{
_patRect = patBbData.ConnectedFieldDatas.FieldRects[(int)classPatGroup[bestID][0].M1];
_res.MatchImages = new Pair(((Mat)classPatGroup[bestID][0].M2).Clone(),/* matched images: 0 = pattern, 1 = test */
((Mat)detBbData.ConnectedFieldDatas.SegmentedSubRegionImage[i].M2).Clone());
_res.MatchPosition = new Pair(
_patRect,_detRect);/* matched rectangles: 0 = pattern, 1 = test */
_res.MatchScore = bestScore;/* match score */
_res.PatternID = bestID;/* matched pattern-group ID */
//_res.ShowResult();
result.Add(_res);
_sImg=ImageAnnotation(_sImg, new List<List<string>> { new List<string>() { bestID.ToString() } },
points: new List<Point>() { new Point(_detRect.X, _detRect.Y) });
}
else
{
bestID = -1;
}/* draw the match rectangle — white when no pattern matched */
Cv2.Rectangle(_sImg, _detRect, bestID!=-1? ColorMatch[bestID]:Scalar.White, 2);
}
// Attach the pattern-group representatives beside the annotated test image.
Mat _patAttach = new Mat();
for(int i =0;i< classPatGroup.Count; i++)
{
_patAttach = ImagesMerge(new Mat[] { (Mat)classPatGroup[i][0].M2 },_patAttach,placeOiren:ENUMS.PLACE_ORIENTATION.DOWN);
}
_sImg = ImagesMerge(new Mat[] { _patAttach } , _sImg);
if(show) ImageShow("SimpleBkgHistMatch", _sImg, 1);
return result;
}
/// <summary>
/// Extracts a HOG (Histogram of Oriented Gradients) feature vector from a grayscale image.
/// Pipeline: pad to a multiple of cellSize -> min-max normalize -> gamma LUT ->
/// Sobel gradients -> per-pixel magnitude/direction -> per-cell orientation histograms ->
/// per-block normalization -> single-row CV_32FC1 feature Mat.
/// </summary>
/// <param name="gray">Input single-channel (grayscale) image.</param>
/// <param name="gamma">Gamma exponent; larger values brighten highlights and darken shadows, boosting edge gradients.</param>
/// <param name="cellSize">Cell side length in pixels.</param>
/// <param name="blockSize">Block side length in cells (block stride is 1 cell).</param>
/// <param name="orientRegs">Number of orientation bins over [0, 360) degrees. Default 9 gives the original 40-degree bins; previously this parameter was accepted but ignored.</param>
/// <returns>A single-row CV_32FC1 Mat containing the concatenated, normalized block descriptors.</returns>
static public Mat HOG_Feature_Extract(Mat gray, float gamma = 0.5f, int cellSize = 6, int blockSize = 3, int orientRegs = 9)
{
    Mat uImg = gray.Clone();
    Mat hogFeature = new Mat();
    Mat xMagnitude = new Mat();
    Mat yMagnitude = new Mat();
    Mat magnitude = new Mat();
    Mat orientation = new Mat();
    float orientPow = 2;
    float binWidth = 360f / orientRegs; // angular width of one orientation bin
    float halfBin = binWidth / 2f;
    // Maps a gradient direction (degrees, [0,360)) to its orientation bin plus a weight.
    // The weight is 1 at the bin centre and decays toward the bin edges, so directions
    // closer to the centre contribute more magnitude.
    Func<float, KeyValuePair<int, float>> RegionCalc = (ang) =>
    {
        // Bin 0 is centred on 0°/360° and is the default, so wrap-around angles in
        // (360 - halfBin, 360) land there as well.
        // BUGFIX: the previous if/else chain computed |ang - 0| for those angles without
        // wrapping; with the even exponent orientPow that produced enormous positive
        // weights (e.g. ang=350 gave ((20-350)/20)^2 ≈ 272 instead of 0.25), corrupting
        // the histogram for near-horizontal gradients.
        int reg = 0;
        float baseAng = 0f;
        for (int b = 1; b < orientRegs; b++)
        {
            float center = b * binWidth;
            if (ang > center - halfBin && ang <= center + halfBin)
            {
                reg = b;
                baseAng = center;
                break;
            }
        }
        // Wrapped angular distance to the bin centre (350° is 10° away from bin 0, not 350°).
        float diff = Math.Abs(ang - baseAng);
        if (diff > 180f) diff = 360f - diff;
        float coe = (float)Math.Pow((halfBin - diff) / halfBin, orientPow);
        return new KeyValuePair<int, float>(reg, coe);
    };
    unsafe
    {
        // Pad the window size up to an integer multiple of cellSize.
        int width = uImg.Width;
        int height = uImg.Height;
        if (width % cellSize != 0)
        {
            width = ((width / cellSize) + 1) * cellSize;
        }
        if (height % cellSize != 0)
        {
            height = ((height / cellSize) + 1) * cellSize;
        }
        uImg = uImg.Resize(new Size(width, height), interpolation: InterpolationFlags.Cubic);
        // Min-max normalize to [0,255] so the gamma LUT index below stays in range.
        uImg = uImg.Normalize(0, 255, NormTypes.MinMax);
        uImg.ConvertTo(uImg, MatType.CV_32FC1);
        // Gamma LUT: raising gamma makes bright pixels brighter and dark pixels darker,
        // sharpening the gradients at edges.
        float[] gammaCoe = new float[256];
        for (int i = 0; i < 256; i++)
        {
            gammaCoe[i] = (float)(Math.Pow((float)i / (float)255, gamma)) * 255f;
        }
        uImg = uImg.Reshape(0, 1);
        for (int i = 0; i < width * height; i++)
        {
            float _pix = ((float*)uImg.Ptr(0))[i];
            ((float*)uImg.Ptr(0))[i] = gammaCoe[(int)_pix];
        }
        uImg = uImg.Reshape(0, height);
        // Horizontal / vertical gradients via 3x3 Sobel.
        xMagnitude = uImg.Sobel(MatType.CV_32FC1, 1, 0, 3);
        yMagnitude = uImg.Sobel(MatType.CV_32FC1, 0, 1, 3);
        // Per-pixel gradient magnitude and direction (angleInDegrees: true -> [0, 360)).
        xMagnitude = xMagnitude.Reshape(0, 1);
        yMagnitude = yMagnitude.Reshape(0, 1);
        magnitude = Mat.Zeros(new Size(width * height, 1), MatType.CV_32FC1);
        orientation = Mat.Zeros(new Size(width * height, 1), MatType.CV_32FC1);
        Cv2.CartToPolar(xMagnitude, yMagnitude, magnitude, orientation, true);
        uImg = uImg.Reshape(0, height);
        magnitude = magnitude.Reshape(0, height);
        orientation = orientation.Reshape(0, height);
        // Per-cell orientation histograms (cell descriptors).
        int cellRows = height / cellSize;
        int cellCols = width / cellSize;
        Mat[,] cellDescriptors = new Mat[cellRows, cellCols];
        for (int i = 0; i < cellRows; i++)
        {
            for (int j = 0; j < cellCols; j++)
            {
                Rect cellRect = new Rect(j * cellSize, i * cellSize, cellSize, cellSize);
                Mat cellGrad = magnitude[cellRect].Clone().Reshape(0, 1);
                Mat cellOrient = orientation[cellRect].Clone().Reshape(0, 1);
                // One cell's HOG: a single-row histogram with orientRegs bins; each pixel
                // adds its gradient magnitude scaled by the bin weight.
                Mat cellDescp = Mat.Zeros(new Size(orientRegs, 1), MatType.CV_32FC1);
                for (int k = 0; k < cellSize * cellSize; k++)
                {
                    KeyValuePair<int, float> calced = RegionCalc(((float*)cellOrient.Ptr(0))[k]);
                    ((float*)cellDescp.Ptr(0))[calced.Key] += calced.Value * ((float*)cellGrad.Ptr(0))[k];
                }
                cellDescriptors[i, j] = cellDescp;
            }
        }
        // Block descriptors: blockSize x blockSize cells, block stride of 1 cell.
        int blockRows = cellRows - blockSize + 1;
        int blockCols = cellCols - blockSize + 1;
        for (int i = 0; i < blockRows; i++)
        {
            for (int j = 0; j < blockCols; j++)
            {
                Mat blockFeat = new Mat(0, 0, MatType.CV_32FC1);
                for (int r = i; r < i + blockSize; r++)
                {
                    for (int c = j; c < j + blockSize; c++)
                    {
                        blockFeat.PushBack(cellDescriptors[r, c].Clone());
                    }
                }
                // Block normalization compensates for local illumination / contrast changes,
                // since raw gradient magnitudes vary over a huge range.
                // NOTE(review): classic HOG normalizes blocks by L2 (Euclidean) or L1
                // (Manhattan) norm; this code uses MinMax. Kept as-is to preserve the
                // existing feature values — confirm before "fixing".
                blockFeat = blockFeat.Normalize(normType: NormTypes.MinMax);
                // Flatten the block to a single row and append it to the feature Mat.
                blockFeat = blockFeat.Reshape(0, 1);
                hogFeature.PushBack(blockFeat);
            }
        }
        // Flatten the whole feature to a single row.
        hogFeature = hogFeature.Reshape(0, 1);
    }
    return hogFeature;
}
}
}
// NOTE(review): the two lines below are web-scrape residue (a blog-article footer) that
// trailed the file after the namespace close and broke compilation; kept as a comment.
// c#二值化特征相关提取  (C# binarization-related feature extraction)
// 最新推荐文章于 2024-03-13 08:52:35 发布  (latest recommended article published 2024-03-13 08:52:35)