using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.Util;
using Emgu.CV.Features2D;
using Emgu.CV.Util;
using System.Diagnostics;
using Emgu.CV.GPU;
namespace orb
{
public partial class Form1 : Form
{
    // Paths of the two user-selected images:
    // filename1 = observed (scene) image, filename2 = model (object) image.
    static string filename1;
    static string filename2;

    public Form1()
    {
        InitializeComponent();
    }

    /// <summary>Lets the user pick the observed (scene) image and previews it.</summary>
    private void button1_Click(object sender, EventArgs e)
    {
        using (OpenFileDialog open = new OpenFileDialog())
        {
            if (open.ShowDialog() == DialogResult.OK)
            {
                filename1 = open.FileName;
                pictureBox1.Load(filename1);
            }
        }
    }

    /// <summary>Lets the user pick the model (object) image to search for.</summary>
    private void button2_Click(object sender, EventArgs e)
    {
        using (OpenFileDialog open = new OpenFileDialog())
        {
            if (open.ShowDialog() == DialogResult.OK)
            {
                filename2 = open.FileName;
            }
        }
    }

    /// <summary>
    /// Detects ORB keypoints in both images, matches their binary descriptors with a
    /// Hamming-distance brute-force 2-NN matcher, filters the matches by uniqueness
    /// and by size/orientation voting, and draws the surviving matches. When at least
    /// 4 matches remain a homography is estimated and the model's outline is projected
    /// onto the result image.
    /// </summary>
    /// <param name="modelImage">Grayscale model (object) image.</param>
    /// <param name="observedImage">Grayscale observed (scene) image.</param>
    /// <param name="matchTime">Milliseconds spent detecting and matching features in the observed image.</param>
    /// <returns>A color image showing the matched keypoints and, if found, the projected model region.</returns>
    public static Image<Bgr, Byte> Draw(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime)
    {
        // NOTE: the previous version branched on GpuInvoke.HasCuda with an empty GPU
        // branch, leaving watch/keypoints/indices/mask unassigned on CUDA machines
        // (a definite-assignment compile error). The CPU path is now unconditional.
        HomographyMatrix homography = null;
        ORBDetector orbCPU = new ORBDetector(500);
        double uniquenessThreshold = 0.8;

        // Extract features from the model image (deliberately outside the timed section).
        VectorOfKeyPoint modelKeyPoints = orbCPU.DetectKeyPointsRaw(modelImage, null);
        Matrix<byte> modelDescriptors = orbCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

        // Time only the per-scene work: detection + description + matching.
        Stopwatch watch = Stopwatch.StartNew();
        VectorOfKeyPoint observedKeyPoints = orbCPU.DetectKeyPointsRaw(observedImage, null);
        Matrix<byte> observedDescriptors = orbCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

        // ORB descriptors are binary strings, so Hamming distance is the correct metric.
        BruteForceMatcher<byte> matcher = new BruteForceMatcher<byte>(DistanceType.Hamming);
        matcher.Add(modelDescriptors);

        // 2 columns: best and second-best match per observed descriptor (k-NN with k = 2).
        Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, 2);
        Matrix<byte> mask;
        using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, 2))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, 2, null);
            mask = new Matrix<byte>(dist.Rows, 1);
            mask.SetValue(255);
            // Reject ambiguous matches whose best/second-best distance ratio is too high.
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        // A homography needs at least 4 point correspondences.
        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
        }
        watch.Stop();

        // Draw the matched keypoints side by side.
        Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
            indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

        #region draw the projected region on the image
        if (homography != null)
        {
            // Project the model image's corners into the scene and outline them in red.
            Rectangle rect = modelImage.ROI;
            PointF[] pts = new PointF[] {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top)};
            homography.ProjectPoints(pts);
            result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
        }
        #endregion

        matchTime = watch.ElapsedMilliseconds;
        return result;
    }

    /// <summary>Runs the matcher on the two selected images and shows the result.</summary>
    private void button3_Click(object sender, EventArgs e)
    {
        // Guard: both images must have been selected before matching.
        if (string.IsNullOrEmpty(filename1) || string.IsNullOrEmpty(filename2))
        {
            MessageBox.Show("Please select both images before matching.");
            return;
        }

        // filename2 is the model being searched for inside filename1 (the scene).
        using (Image<Gray, byte> observed = new Image<Gray, byte>(filename1))
        using (Image<Gray, byte> model = new Image<Gray, byte>(filename2))
        {
            long time;
            Image<Bgr, byte> dst = Draw(model, observed, out time);
            pictureBox1.Image = dst.ToBitmap();
        }
    }
}
}