主窗口:
弹出窗口:
主窗口源码:
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Windows.Forms;
using ZXing;
using ZXing.Common;
using ZXing.QrCode.Internal;
using OpenCvSharp;
using OpenCvSharp.Extensions;
namespace QR_Code_Finder
{
public partial class Form1 : Form
{
    public Form1()
    {
        InitializeComponent();
    }

    /// <summary>
    /// Loads the image whose path is in the text box into the picture box.
    /// </summary>
    private void btn_loadImage_Click(object sender, EventArgs e)
    {
        if (this.txtBoxFilePath.Text.Trim() == string.Empty)
        {
            return;
        }
        // Dispose the previously shown image so its GDI+ resources (and the
        // file lock that Image.FromFile keeps open) are released.
        Image oldImage = this.pictureBox1.Image;
        this.pictureBox1.Image = Image.FromFile(this.txtBoxFilePath.Text);
        if (oldImage != null)
        {
            oldImage.Dispose();
        }
    }

    /// <summary>
    /// Runs the low-level ZXing QR detector on the selected file and shows
    /// the finder-pattern locations in a message box.
    /// </summary>
    private void btn_Recognize_Click(object sender, EventArgs e)
    {
        if (this.txtBoxFilePath.Text.Trim() == string.Empty)
        {
            return;
        }
        // Dispose the bitmap when detection finishes (the original leaked it).
        using (Bitmap invoiceImage = new Bitmap(this.txtBoxFilePath.Text))
        {
            MessageBox.Show(this.Detect(invoiceImage));
        }
    }

    /// <summary>
    /// Decodes a barcode/QR code from the selected file with the high-level
    /// ZXing <c>BarcodeReader</c> and shows the decoded text (or a failure
    /// message) in a message box.
    /// </summary>
    public void Detect1()
    {
        BarcodeReader reader = new BarcodeReader();
        reader.Options.CharacterSet = "UTF-8";
        // Dispose the bitmap when decoding finishes (the original leaked it).
        using (Bitmap map = new Bitmap(this.txtBoxFilePath.Text))
        {
            Result result = reader.Decode(map);
            if (result == null)
                MessageBox.Show("识别失败");
            else
                MessageBox.Show(result.Text);
        }
    }

    /// <summary>
    /// Detects (but does not decode) a QR code in <paramref name="bitmap"/>
    /// using ZXing's internal <c>Detector</c>.
    /// </summary>
    /// <param name="bitmap">Image to scan.</param>
    /// <returns>
    /// A string listing the detected points of interest, or
    /// "Failed to detect QR code." when detection throws.
    /// </returns>
    public string Detect(Bitmap bitmap)
    {
        try
        {
            // LuminanceSource abstracts the bitmap into grayscale brightness
            // values behind an immutable interface.
            LuminanceSource source = new BitmapLuminanceSource(bitmap);
            // HybridBinarizer: locally adaptive thresholding, intended for
            // 2-D codes. Computing the black matrix is expensive, so it is
            // fetched exactly once below.
            var binarizer = new HybridBinarizer(source);
            var binBitmap = new BinaryBitmap(binarizer);
            BitMatrix bm = binBitmap.BlackMatrix; // 2-D bit array, true = black
            Detector detector = new Detector(bm);
            // DetectorResult carries the sampled black/white matrix plus the
            // points of interest (finder patterns / corners) in the image.
            DetectorResult result = detector.detect();
            var retStr = new System.Text.StringBuilder("Found at points ");
            foreach (ResultPoint point in result.Points)
            {
                retStr.Append(point.ToString()).Append(", ");
            }
            return retStr.ToString();
        }
        catch
        {
            // ZXing signals "no QR code found" by throwing; report it as text.
            return "Failed to detect QR code.";
        }
    }

    /// <summary>
    /// Lets the user pick an image file, then loads it into the picture box.
    /// </summary>
    private void btn_OpenFile_Click(object sender, EventArgs e)
    {
        // OpenFileDialog is IDisposable — release its handle deterministically.
        using (OpenFileDialog openFileDialog1 = new OpenFileDialog())
        {
            openFileDialog1.InitialDirectory = @"C:\Users\Zohar\Pictures\Saved Pictures\";
            openFileDialog1.RestoreDirectory = true;
            if (openFileDialog1.ShowDialog() == DialogResult.OK)
            {
                this.txtBoxFilePath.Text = openFileDialog1.FileName;
                this.btn_loadImage_Click(sender, e); // load the chosen image
            }
        }
    }

    /// <summary>
    /// Runs the OpenCV contour-finding pipeline on the selected file.
    /// </summary>
    private void btn_OpenCV_Processing_Click(object sender, EventArgs e)
    {
        if (this.txtBoxFilePath.Text == string.Empty)
        {
            MessageBox.Show("请选择文件");
            return;
        }
        using (Bitmap bitmap = (Bitmap)Bitmap.FromFile(this.txtBoxFilePath.Text))
        {
            this.pictureProcessing(bitmap);
        }
    }

    /// <summary>
    /// Grayscale -> Gaussian blur -> Canny -> FindContours pipeline; draws
    /// large, low-vertex-count contours (QR-code candidates) on a copy of
    /// the source image and shows both results.
    /// </summary>
    private void pictureProcessing(Bitmap bitmap)
    {
        var imageSrc = BitmapConverter.ToMat(bitmap); // Bitmap -> Mat (BGR byte order)
        var imageOrignal = new Mat();
        imageSrc.CopyTo(imageOrignal); // untouched copy used for drawing results
        // BUG FIX: CvtColor/GaussianBlur return NEW mats; the original
        // discarded both results, so Canny actually ran on the raw color
        // image. Also use BGR2GRAY — BitmapConverter produces BGR order.
        imageSrc = imageSrc.CvtColor(ColorConversionCodes.BGR2GRAY, 0);
        imageSrc = imageSrc.GaussianBlur(new OpenCvSharp.Size(0, 0), 5, 5, BorderTypes.Default);
        var imageTarget = imageSrc.Canny(50, 200, 3, false); // edge map
        Mat[] contours; // one Mat of points per contour
        Mat hierarchy = new Mat(new OpenCvSharp.Size(imageSrc.Width, imageSrc.Height), MatType.CV_8UC1);
        // hierarchy[i] holds next/previous/first-child/parent indices for
        // contours[i]; entries are negative when no such contour exists.
        Cv2.FindContours(imageTarget, out contours, hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple, null);
        Array.Sort<Mat>(contours, new MatAreaCompareReverse<Mat>()); // largest area first
        // BUG FIX: the original passed a separately incremented counter as the
        // contour index, so once any contour was skipped every later match
        // drew the WRONG contour. Use the real array index instead.
        for (int index = 0; index < contours.Length; index++)
        {
            Mat contour = contours[index];
            var peri = contour.ArcLength(true); // contour perimeter
            // Douglas-Peucker approximation, tolerance = 2% of the perimeter.
            var approx = contour.ApproxPolyDP(0.02 * peri, true);
            // Few vertices plus a large area => plausible code/label region.
            if (approx.Rows <= 7 && contour.ContourArea(false) > 5000)
            {
                Cv2.DrawContours(imageOrignal, contours, index, Scalar.Blue, 5, LineTypes.Link8, null, 0, null);
                Console.WriteLine(contour.ContourArea(false).ToString());
            }
        }
        Window w = new Window(WindowMode.Normal, imageOrignal); // result window
        w.Resize(imageOrignal.Width / 2, imageOrignal.Height / 2);
        w.ShowImage(imageOrignal); // image with the candidate contours drawn
        // Show the edge-detection result in the form (as the original comment
        // intended — the original displayed the unprocessed source by mistake).
        this.pictureBox1.Image = BitmapConverter.ToBitmap(imageTarget);
    }

    /// <summary>
    /// Locates a known template image inside <paramref name="bitmap"/> via
    /// OpenCV template matching and draws a red rectangle at the best match.
    /// NOTE(review): experimental/debug code — the template path is hard-coded.
    /// </summary>
    private void QR_Find(Bitmap bitmap)
    {
        if (bitmap == null)
        {
            MessageBox.Show("无图像数据");
            this.btn_OpenFile.Focus();
            return;
        }
        var imageSrc = BitmapConverter.ToMat(bitmap);
        // TODO: hard-coded template path; make this configurable.
        var imageTobeFind = Cv2.ImRead(@"E:\sample\无法定位.bmp", ImreadModes.AnyColor);
        // MatchTemplate (re)allocates the result to
        // (W - w + 1) x (H - h + 1) CV_32FC1 as needed.
        var imageTarget = new Mat(new OpenCvSharp.Size(imageSrc.Width, imageSrc.Height), MatType.CV_32FC1);
        Cv2.MatchTemplate(imageSrc, imageTobeFind, imageTarget, TemplateMatchModes.CCoeff);
        OpenCvSharp.Point minloc, maxloc;
        Cv2.MinMaxLoc(imageTarget, out minloc, out maxloc);
        // For the CCoeff mode the best match is at the MAXIMUM location.
        Cv2.Rectangle(imageSrc, maxloc, new OpenCvSharp.Point(maxloc.X + imageTobeFind.Width, maxloc.Y + imageTobeFind.Height), Scalar.Red, 5, LineTypes.Link8, 0);
        this.pictureBox1.Image = BitmapConverter.ToBitmap(imageSrc);
    }
}
/// <summary>
/// Comparer that orders <c>Mat</c> contours by contour area in descending
/// order, so that sorting puts the largest contour first.
/// </summary>
/// <typeparam name="T">A <c>Mat</c>-derived contour type.</typeparam>
class MatAreaCompareReverse<T> : IComparer<T>
    where T : Mat
{
    /// <summary>
    /// Negative when <paramref name="x"/> has the larger area, positive when
    /// <paramref name="y"/> does, zero when the areas are equal.
    /// </summary>
    public int Compare(T x, T y)
    {
        double areaX = x.ContourArea(false);
        double areaY = y.ContourArea(false);
        // Descending order: compare y's area against x's.
        return areaY.CompareTo(areaX);
    }
}
}
未使用代码Utility_NotUse.cs:
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenCvSharp;
namespace QR_Code_Finder
{
/// <summary>
/// Scratch/utility helpers not referenced by the application; kept for reference.
/// </summary>
class Utility_NotUse
{
    /// <summary>
    /// Serializes an image to an uncompressed BMP byte array.
    /// </summary>
    private static byte[] ImageToByte2(Image img)
    {
        using (var stream = new MemoryStream())
        {
            img.Save(stream, System.Drawing.Imaging.ImageFormat.Bmp);
            return stream.ToArray();
        }
    }

    /// <summary>
    /// Serializes <paramref name="bitmap"/> to BMP, overwrites its pixel data
    /// with a solid color, and returns the resulting bitmap.
    /// Assumes a 54-byte header (BITMAPFILEHEADER + BITMAPINFOHEADER) and
    /// 32 bits per pixel — TODO confirm against the formats callers use.
    /// </summary>
    private static Bitmap SetBitmapRawData1(Bitmap bitmap)
    {
        using (MemoryStream ms = new MemoryStream())
        {
            bitmap.Save(ms, ImageFormat.Bmp);
            // GetBuffer returns the stream's underlying array, so the writes
            // below are reflected in the stream itself. Only the first
            // ms.Length bytes of the buffer are valid data.
            byte[] bitmapData = ms.GetBuffer();
            int dataLength = (int)ms.Length;
            const int BITMAP_HEADER_OFFSET = 54; // BITMAPFILEHEADER(14) + BITMAPINFOHEADER(40)
            Color colorValue = Color.Purple;
            // BUG FIX: the original iterated i over the whole buffer and then
            // indexed at HEADER_OFFSET + i, overrunning the array by up to 57
            // bytes (IndexOutOfRangeException). Start at the pixel data and
            // stop while a full 4-byte pixel remains.
            for (int i = BITMAP_HEADER_OFFSET; i + 3 < dataLength; i += 4)
            {
                // BUG FIX: BMP stores pixels in B,G,R,A order; the original
                // wrote R,G,B,A, swapping the red and blue channels.
                bitmapData[i] = colorValue.B;
                bitmapData[i + 1] = colorValue.G;
                bitmapData[i + 2] = colorValue.R;
                bitmapData[i + 3] = colorValue.A;
            }
            // BUG FIX: Save leaves the stream positioned at the end; rewind
            // before handing the stream to the Bitmap constructor.
            ms.Position = 0;
            // BUG FIX: a Bitmap built from a stream requires that stream to
            // stay open for the bitmap's lifetime, but ms is disposed here.
            // Clone into a stream-independent bitmap before returning.
            using (Bitmap streamBacked = new Bitmap(ms))
            {
                return new Bitmap(streamBacked);
            }
        }
    }

    /// <summary>
    /// Overwrites every pixel of <paramref name="bitmap"/> in place with a
    /// solid color via LockBits and returns the same instance.
    /// Assumes a 32-bits-per-pixel format (4 bytes per pixel) — TODO confirm.
    /// </summary>
    private static Bitmap SetBitmapRawData(Bitmap bitmap)
    {
        // Lock the bitmap's bits for read/write access to the raw pixel memory.
        Rectangle rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
        System.Drawing.Imaging.BitmapData bmpData = bitmap.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, bitmap.PixelFormat);
        try
        {
            // Address of the first scan line.
            IntPtr ptr = bmpData.Scan0;
            // Copy the pixel bytes into a managed array (Stride may be
            // negative for bottom-up bitmaps, hence Math.Abs).
            byte[] bitmapData = new byte[Math.Abs(bmpData.Stride) * bitmap.Height];
            System.Runtime.InteropServices.Marshal.Copy(ptr, bitmapData, 0, bitmapData.Length);
            Color colorValue = Color.Purple;
            // NOTE: any per-row padding bytes inside the stride are overwritten
            // too, which is harmless for a solid fill.
            for (int i = 0; i + 3 < bitmapData.Length; i += 4)
            {
                // BUG FIX: 32bpp GDI+ formats are laid out B,G,R,A in memory;
                // the original wrote R,G,B,A, swapping red and blue.
                bitmapData[i] = colorValue.B;
                bitmapData[i + 1] = colorValue.G;
                bitmapData[i + 2] = colorValue.R;
                bitmapData[i + 3] = colorValue.A;
            }
            // Copy the modified bytes back into the bitmap.
            System.Runtime.InteropServices.Marshal.Copy(bitmapData, 0, ptr, bitmapData.Length);
        }
        finally
        {
            // Always unlock, even if the marshaling throws.
            bitmap.UnlockBits(bmpData);
        }
        return bitmap;
    }
}
class sampleCode1
{
    /// <summary>
    /// Interactive watershed segmentation demo: the user scribbles marker
    /// strokes on the source window with the mouse, then presses space/'w'
    /// to run cv::watershed and display the colored segmentation.
    /// Port of:
    /// https://github.com/Itseez/opencv_extra/blob/master/learning_opencv_v2/ch9_watershed.cpp
    /// </summary>
    private static void watershedExample()
    {
        var src = new Mat(@"..\..\Images\corridor.jpg", ImreadModes.AnyDepth | ImreadModes.AnyColor);
        var srcCopy = new Mat();
        src.CopyTo(srcCopy);
        var markerMask = new Mat();
        // NOTE(review): BGRA2GRAY expects a 4-channel input; the load above
        // most likely yields 3-channel BGR — confirm this conversion succeeds.
        Cv2.CvtColor(srcCopy, markerMask, ColorConversionCodes.BGRA2GRAY);
        var imgGray = new Mat();
        Cv2.CvtColor(markerMask, imgGray, ColorConversionCodes.GRAY2BGR);
        // Clear the marker mask; the user's mouse strokes are drawn into it.
        markerMask = new Mat(markerMask.Size(), markerMask.Type(), s: Scalar.All(0));
        var sourceWindow = new Window("Source (Select areas by mouse and then press space)")
        {
            Image = srcCopy
        };
        var previousPoint = new OpenCvSharp.Point(-1, -1);
        // Mouse handler: while the left button is held, draw white strokes
        // into both the marker mask and the displayed copy.
        sourceWindow.OnMouseCallback += (@event, x, y, flags) =>
        {
            // Ignore events outside the image bounds.
            if (x < 0 || x >= srcCopy.Cols || y < 0 || y >= srcCopy.Rows)
            {
                return;
            }
            if (@event == MouseEvent.LButtonUp || !flags.HasFlag(MouseEvent.FlagLButton))
            {
                // Stroke finished — reset the anchor point.
                previousPoint = new OpenCvSharp.Point(-1, -1);
            }
            else if (@event == MouseEvent.LButtonDown)
            {
                previousPoint = new OpenCvSharp.Point(x, y);
            }
            else if (@event == MouseEvent.MouseMove && flags.HasFlag(MouseEvent.FlagLButton))
            {
                var pt = new OpenCvSharp.Point(x, y);
                if (previousPoint.X < 0)
                {
                    previousPoint = pt;
                }
                Cv2.Line(img: markerMask, pt1: previousPoint, pt2: pt, color: Scalar.All(255), thickness: 5);
                Cv2.Line(img: srcCopy, pt1: previousPoint, pt2: pt, color: Scalar.All(255), thickness: 5);
                previousPoint = pt;
                sourceWindow.Image = srcCopy;
            }
        };
        var rnd = new Random();
        // Main key loop: ESC quits, 'r' resets, 'w'/space runs watershed.
        for (;;)
        {
            var key = Cv2.WaitKey(0);
            if ((char)key == 27) // ESC
            {
                break;
            }
            if ((char)key == 'r') // Reset
            {
                markerMask = new Mat(markerMask.Size(), markerMask.Type(), s: Scalar.All(0));
                src.CopyTo(srcCopy);
                sourceWindow.Image = srcCopy;
            }
            if ((char)key == 'w' || (char)key == ' ') // Apply watershed
            {
                OpenCvSharp.Point[][] contours; //vector<vector<Point>> contours;
                HierarchyIndex[] hierarchyIndexes; //vector<Vec4i> hierarchy;
                Cv2.FindContours(
                    markerMask,
                    out contours,
                    out hierarchyIndexes,
                    mode: RetrievalModes.CComp,
                    method: ContourApproximationModes.ApproxSimple);
                if (contours.Length == 0)
                {
                    continue; // nothing scribbled yet
                }
                // Seed image: each scribbled component is filled with its own
                // positive label (componentCount + 1); 0 means "unknown".
                var markers = new Mat(markerMask.Size(), MatType.CV_32S, s: Scalar.All(0));
                var componentCount = 0;
                var contourIndex = 0;
                // Walk the top-level contour chain via hierarchy .Next links.
                while ((contourIndex >= 0))
                {
                    Cv2.DrawContours(
                        markers,
                        contours,
                        contourIndex,
                        color: Scalar.All(componentCount + 1),
                        thickness: -1,
                        lineType: LineTypes.Link8,
                        hierarchy: hierarchyIndexes,
                        maxLevel: int.MaxValue);
                    componentCount++;
                    contourIndex = hierarchyIndexes[contourIndex].Next;
                }
                if (componentCount == 0)
                {
                    continue;
                }
                // One random BGR color per component for visualization.
                var colorTable = new List<Vec3b>();
                for (var i = 0; i < componentCount; i++)
                {
                    var b = rnd.Next(0, 255); //Cv2.TheRNG().Uniform(0, 255);
                    var g = rnd.Next(0, 255); //Cv2.TheRNG().Uniform(0, 255);
                    var r = rnd.Next(0, 255); //Cv2.TheRNG().Uniform(0, 255);
                    colorTable.Add(new Vec3b((byte)b, (byte)g, (byte)r));
                }
                Cv2.Watershed(src, markers);
                var watershedImage = new Mat(markers.Size(), MatType.CV_8UC3);
                // Paint the watershed image: -1 marks segment boundaries
                // (white), valid labels get their table color, anything else
                // is painted black.
                for (var i = 0; i < markers.Rows; i++)
                {
                    for (var j = 0; j < markers.Cols; j++)
                    {
                        var idx = markers.At<int>(i, j);
                        if (idx == -1)
                        {
                            watershedImage.Set(i, j, new Vec3b(255, 255, 255));
                        }
                        else if (idx <= 0 || idx > componentCount)
                        {
                            watershedImage.Set(i, j, new Vec3b(0, 0, 0));
                        }
                        else
                        {
                            watershedImage.Set(i, j, colorTable[idx - 1]);
                        }
                    }
                }
                // Blend the segmentation 50/50 with the grayscale original.
                watershedImage = watershedImage * 0.5 + imgGray * 0.5;
                Cv2.ImShow("Watershed Transform", watershedImage);
                Cv2.WaitKey(1); //do events
            }
        }
        sourceWindow.Dispose();
        Cv2.DestroyAllWindows();
        src.Dispose();
    }
}
}