Calling a C++ (OpenCV3) DLL from C#

1. Create a new C++ DLL project

1.1 In the New Project dialog, choose to create a Win32 console application and click OK.


1.2 Click Next; do not click Finish yet.


1.3 Select DLL, check Empty project, and click Finish; the project is created.


1.4 Set the configuration to Release, x64. Right-click Source Files -> Add -> New Item, add a .cpp file, and click Add.


2. Add the code that C# will call to the .cpp file

2.1 The code is shown below. Pay attention to the function interface: images are passed as IplImage*, not as the Mat class, because Mat cannot be passed across the C interface. Passing bool parameters also tends to fail when the function is called from C#, so prefer int.

#include <iostream>
#include <opencv2/opencv.hpp>
using namespace std;

// Export the function with C linkage so it can be located and called from C#.
extern "C" __declspec(dllexport) void testdll(IplImage* src, IplImage* dst, int x);

void testdll(IplImage* src, IplImage* dst, int x)       // the parameter x is unused here
{
	// Wrap the incoming IplImage as a Mat (no data copy)
	cv::Mat srcImage = cv::cvarrToMat(src);

	// Image processing
	cv::Mat dstImage;
	cv::blur(srcImage, dstImage, cv::Size(9, 9));

	// Pass the result back through dst; note that this copies the IplImage header,
	// so the caller must allocate dst with the same size and number of channels.
	IplImage temp = dstImage;
	*dst = temp;
}


2.2 Build the DLL: Build -> Rebuild Solution (or Build Solution). When the build succeeds, the output directory is shown in the Output window.


3. C# code that calls the OpenCV C++ DLL

Once the DLL file is placed in the application's path, the exported function can be called from C# after the two-line declaration below (when the DLL contains several functions, every function needs its own declaration). Note that IplImage* on the C++ side corresponds to IntPtr in C#. If you build for x86 instead of x64, also make sure the calling conventions match, for example by adding CallingConvention = CallingConvention.Cdecl to the DllImport attribute, since a plain C++ export defaults to __cdecl; on x64 this does not matter.

[DllImport("tesstdll.dll")]
public static extern void testdll(IntPtr src, IntPtr dst, int input);
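
Stripped of the experiments and commented-out lines, the essential call sequence in the full listing below boils down to the following sketch (the helpers EmguCVMatToIplImagePointer and IplImagePointerToEmgucvMat are defined further down in the same file):

Mat srcmat = CvInvoke.Imread("00.jpg");                         // read the source image
Image<Bgr, byte> dst = srcmat.ToImage<Bgr, byte>();
IntPtr dstptr = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(dst),
    Emgu.CV.CvEnum.IplDepth.IplDepth_8U, 3);                    // allocate the destination IplImage
testdll(EmguCVMatToIplImagePointer(srcmat), dstptr, 0);         // call into the C++ DLL
Mat result = IplImagePointerToEmgucvMat(dstptr);                // convert the result back for display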

The code below is the C# project code. Since I am not familiar with C# myself, the following C# was written by a colleague; it is a bit messy and contains some bugs, so treat it strictly as a reference. Roughly, the steps are: read an image, convert its format (this part is fairly involved; the helper functions are all listed below), and then call the function in the DLL.

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.Util;
using Emgu.CV.Structure;
using System.Drawing;
using System.Drawing.Imaging;
using System.Threading;
using System.Diagnostics;

namespace WindowsFormsApplication16
{
    public partial class Form1 : Form
    {
        public Form1()
        {
            InitializeComponent();
            timer1.Start();
        }

        [DllImport("tesstdll.dll")]
        public static extern void testdll(IntPtr src, IntPtr dst, int input);

        private void button1_Click(object sender, EventArgs e)
        {
            Mat srcmat = CvInvoke.Imread("00.jpg");
            //Image<Bgr, byte> m1 = new Image<Bgr, byte>("00.jpg");
            imageBox1.Image = srcmat;
            Image<Bgr, byte> dst = srcmat.ToImage<Bgr, byte>();
            IntPtr dstptr = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(dst), Emgu.CV.CvEnum.IplDepth.IplDepth_8U, 3);
            testdll(EmguCVMatToIplImagePointer(srcmat), dstptr, 0);    
            IMG_01.Image = IplImagePointerToEmgucvMat(dstptr);
            #region

            //GetMat(EmguCVMatToIplImagePointer(matSrc));
            //Image<Bgr, byte> m1 = new Image<Bgr, byte>("00.jpg");
            //IMG_01.Image = m1;
            //Image<Bgr, byte> m2 = new Image<Bgr, byte>("01.jpg");
            //IMG_02.Image = m2;
            //Image<Bgr, byte> m3 = new Image<Bgr, byte>("02.jpg");
            //IMG_03.Image = m3;


            //IntPtr ptr = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(m1), Emgu.CV.CvEnum.IplDepth.IplDepth_8U, 3);
            //MessageBox.Show("Start");
            // The lines below referenced m1, ptr, pipesSortOrder and pipesSortOrder2,
            // which are not defined in this excerpt, so they are kept commented out.
            //lbl_01.Text = (pipesSortOrder(EmgucvImageToIplImagePointer(m1), ptr, false, false, false, 0)).ToString();
            //lbl_01.Text = (pipesSortOrder(m1.Ptr, ptr, 0, 0, 0, Convert.ToInt32(0))).ToString();
            //pipesSortOrder2();
            //IMG_01.Image = IplImagePointerToEmgucvMat(ptr);
            DialogResult reslult = MessageBox.Show("Finished!", "Info", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            //IntPtr ptr2 = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(m2), Emgu.CV.CvEnum.IplDepth.IplDepth_8U, 3);
            //lbl_02.Text = (pipesSortOrder(m2.Ptr, ptr2, 1, 0, 0, 0)).ToString();
            //IMG_02.Image = IplImagePointerToEmgucvMat(ptr2);

            //IntPtr ptr3 = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(m3), Emgu.CV.CvEnum.IplDepth.IplDepth_8U, 3);
            //lbl_03.Text = (pipesSortOrder(m3.Ptr, ptr3, 1, 0, 0, 0)).ToString();
            //IMG_03.Image = IplImagePointerToEmgucvMat(ptr3);

            //tsp9_pipes_dll.dll
            //GetMat(src.Ptr);
            //pictureBox1.Image = src.ToBitmap();

            /*  Content of the block that tested successfully
            //IntPtr SS = EmgucvImageToIplImagePointer(m1);
            //IntPtr ptr2 = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(m1), Emgu.CV.CvEnum.IplDepth.IplDepth_8U, 3);

            //int reslut = histo_enhance(SS, ptr2);
            //label1.Text = reslut.ToString();
            //IMG_03.Image = IplImagePointerToEmgucvImage<Bgr, byte>(ptr2);
            //*/

            //MIplImage mlI = (MIplImage)Marshal.PtrToStructure(ptr, typeof(MIplImage));
            //Image<Bgr, byte> outframe = new Image<Bgr, byte>(mlI.Width, mlI.Height, mlI.WidthStep, mlI.ImageData);
            //pictureBox2.Image = outframe.ToBitmap();
            //matSrc.ToImage
            //imageBox1.Image = matSrc;


            #endregion

        }

        private void MatDeal()
        {
            Stopwatch sw = new Stopwatch();
            VideoCapture VCapture = new VideoCapture("newpipe.mp4");
            int count = Convert.ToInt32(VCapture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameCount));
            bool isstart = false;
            for (int i = 1; i <= count; i++)
            {
                Mat src = new Mat();
                VCapture.Read(src);
                Image<Bgr, byte> image = src.ToImage<Bgr, byte>();
                IntPtr ptr2 = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(image), Emgu.CV.CvEnum.IplDepth.IplDepth_8U, 3);
                int reslut = -1;
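                // pipesSortOrder is another function exported from a C++ DLL; like testdll,
                // it needs its own [DllImport] declaration (not shown in this excerpt).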
                if (!isstart)
                {
                    reslut = pipesSortOrder(image.Ptr, ptr2, 0, 0, 0, 0);
                    isstart = true;
                    sw.Start();
                }
                else
                    reslut = pipesSortOrder(image.Ptr, ptr2, 1, 0, 0, 0);
                Mat dst = IplImagePointerToEmgucvMat(ptr2);
                long inter = sw.ElapsedMilliseconds;
                //Image<Bgr, byte> dst = IplImagePointToEmgucvIImage<Bgr, byte>(ptr2);

                try
                {
                    Invoke(new Action(() =>
                    {
                        //lbl_02.Text = reslut.ToString()+"       "+inter.ToString ();
                        //IMG_02.Image = dst;
                    }));
                }
                catch (Exception)
                {
                    throw;
                }


                Thread.Sleep(30);
            }
        }

        //private Image ShowIplImageInWindow(IntPtr src)
        //{
        //    MIplImage img = (MIplImage)Marshal.PtrToStructure(src, typeof(MIplImage));

        //    Bitmap disp = new Bitmap(img.Width, img.Height, PixelFormat.Format24bppRgb);
        //    BitmapData bmp = disp.LockBits(new Rectangle(0, 0, img.Width, img.Height), ImageLockMode.WriteOnly, PixelFormat.Format24bppRgb);
        //    long linebytes = (img.Width * 24 + 31) / 32 * 4;
        //    unsafe
        //    {
        //        byte* pixel = (byte*)bmp.Scan0.ToPointer();
        //        if (img.NChannels == 3)
        //        {
        //            for (int i = 0; i < img.Height; i++)
        //            {
        //                for (int j = 0, n = 0; j < img.Width * 3; j++, n++)
        //                {
        //                    byte b = ((byte*)img.ImageData + img.WidthStep * i)[3 * j];
        //                    byte g = ((byte*)img.ImageData + img.WidthStep * i)[3 * j + 1];
        //                    byte r = ((byte*)img.ImageData + img.WidthStep * i)[3 * j + 2];

        //                    *(pixel + linebytes * (i) + n) = b;
        //                    n++;
        //                    *(pixel + linebytes * (i) + n) = g;
        //                    n++;
        //                    *(pixel + linebytes * (i) + n) = r;
        //                }
        //            }
        //        }
        //        else if (img.NChannels == 1)
        //        {
        //            for (int i = 0; i < img.Height; i++)
        //            {
        //                for (int j = 0, n = 0; j < img.Width; j++, n++)
        //                {
        //                    byte g = ((byte*)img.ImageData + img.WidthStep * i)[j];

        //                    *(pixel + linebytes * (i) + n) = g;
        //                    n++;
        //                    *(pixel + linebytes * (i) + n) = g;
        //                    n++;
        //                    *(pixel + linebytes * (i) + n) = g;
        //                }
        //            }
        //        }
        //        else
        //        {
        //            return null;
        //        }
        //        disp.UnlockBits(bmp);
        //        return (Image)disp;
        //    }
        //}


        //When using OpenCv and EmguCv from .NET, you have to convert between the image formats the three of them support. .NET carries images in the Bitmap class, OpenCv carries them behind an IplImage pointer,
        //   and EmguCv carries them in Image<TColor,TDepth>. The code below deals with converting between IplImage, Image<TColor,TDepth> and Bitmap.

        #region  IplImage  <=>  Image<TColor,TDepth>
        //    IplImage  <=>  Image<TColor,TDepth>
        //With the help of the MIplImage struct, converting between an IplImage pointer and Image<TColor,TDepth> is straightforward.
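        //For example (an illustrative sketch, not from the original sample):
        //    IntPtr p = EmgucvImageToIplImagePointer(img);                        // Image<Bgr, byte> -> IplImage*
        //    Image<Bgr, byte> img2 = IplImagePointerToEmgucvImage<Bgr, byte>(p);  // IplImage* -> Image<Bgr, byte>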


        /// Converts an MIplImage struct to an IplImage pointer;
        /// note: the pointer must be released with Marshal.FreeHGlobal after use.
        /// <param name="mi">MIplImage object</param>
        /// <returns>the IplImage pointer</returns>
        public static IntPtr MIplImageToIplImagePointer(MIplImage mi)
        {
            IntPtr ptr = Marshal.AllocHGlobal(mi.NSize);
            Marshal.StructureToPtr(mi, ptr, false);
            return ptr;
        }

        /// Converts an IplImage pointer to an MIplImage struct
        /// <param name="ptr">IplImage pointer</param>
        /// <returns>the MIplImage struct</returns>
        public static MIplImage IplImagePointerToMIplImage(IntPtr ptr)
        {
            return (MIplImage)Marshal.PtrToStructure(ptr, typeof(MIplImage));
        }

        /// Converts an IplImage pointer to an Emgucv Image object;
        /// note: you have to choose TColor and TDepth yourself according to the depth and nChannels of the IplImage
        /// <typeparam name="TColor">Color type of this image (either Gray, Bgr, Bgra, Hsv, Hls, Lab, Luv, Xyz or Ycc)</typeparam>
        /// <typeparam name="TDepth">Depth of this image (either Byte, SByte, Single, double, UInt16, Int16 or Int32)</typeparam>
        /// <param name="ptr">IplImage pointer</param>
        /// <returns>the Image object</returns>
        public static Image<TColor, TDepth> IplImagePointerToEmgucvImage<TColor, TDepth>(IntPtr ptr)
            where TColor : struct, IColor
            where TDepth : new()
        {
            MIplImage mi = IplImagePointerToMIplImage(ptr);
            return new Image<TColor, TDepth>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
        }
        /// <summary>
        /// Converts an IplImage pointer to the Emgucv IImage interface;
        /// 1 channel maps to a grayscale image, 3 channels to a BGR image, 4 channels to a BGRA image.
        /// Note: a 3-channel image is not necessarily BGR; it could also be HLS, HSV, etc.
        /// </summary>
        /// <param name="ptr">IplImage pointer</param>
        /// <returns>the IImage interface</returns>
        public static IImage IplImagePointToEmgucvIImage(IntPtr ptr)
        {
            MIplImage mi = IplImagePointerToMIplImage(ptr);
            Type tColor;
            Type tDepth;
            string unsupportedDepth = "Unsupported pixel depth IPL_DEPTH";
            string unsupportedChannels = "Unsupported number of channels (only 1, 3 and 4 channels are supported)";
            switch (mi.NChannels)
            {
                case 1:
                    tColor = typeof(Gray);
                    switch (mi.Depth)
                    {
                        case Emgu.CV.CvEnum.IplDepth.IplDepth_8U:
                            tDepth = typeof(Byte);
                            return new Image<Gray, Byte>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth16U:
                            tDepth = typeof(UInt16);
                            return new Image<Gray, UInt16>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth16S:
                            tDepth = typeof(Int16);
                            return new Image<Gray, Int16>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth32S:
                            tDepth = typeof(Int32);
                            return new Image<Gray, Int32>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth32F:
                            tDepth = typeof(Single);
                            return new Image<Gray, Single>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth64F:
                            tDepth = typeof(Double);
                            return new Image<Gray, Double>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        default:
                            throw new NotImplementedException(unsupportedDepth);
                    }
                case 3:
                    tColor = typeof(Bgr);
                    switch (mi.Depth)
                    {
                        case Emgu.CV.CvEnum.IplDepth.IplDepth_8U:
                            tDepth = typeof(Byte);
                            return new Image<Bgr, Byte>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth16U:
                            tDepth = typeof(UInt16);
                            return new Image<Bgr, UInt16>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth16S:
                            tDepth = typeof(Int16);
                            return new Image<Bgr, Int16>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth32S:
                            tDepth = typeof(Int32);
                            return new Image<Bgr, Int32>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth32F:
                            tDepth = typeof(Single);
                            return new Image<Bgr, Single>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth64F:
                            tDepth = typeof(Double);
                            return new Image<Bgr, Double>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        default:
                            throw new NotImplementedException(unsupportedDepth);
                    }
                case 4:
                    tColor = typeof(Bgra);
                    switch (mi.Depth)
                    {
                        case Emgu.CV.CvEnum.IplDepth.IplDepth_8U:
                            tDepth = typeof(Byte);
                            return new Image<Bgra, Byte>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth16U:
                            tDepth = typeof(UInt16);
                            return new Image<Bgra, UInt16>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth16S:
                            tDepth = typeof(Int16);
                            return new Image<Bgra, Int16>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth32S:
                            tDepth = typeof(Int32);
                            return new Image<Bgra, Int32>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth32F:
                            tDepth = typeof(Single);
                            return new Image<Bgra, Single>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        case Emgu.CV.CvEnum.IplDepth.IplDepth64F:
                            tDepth = typeof(Double);
                            return new Image<Bgra, Double>(mi.Width, mi.Height, mi.WidthStep, mi.ImageData);
                        default:
                            throw new NotImplementedException(unsupportedDepth);
                    }
                default:
                    throw new NotImplementedException(unsupportedChannels);
            }
        }
        /// <summary>
        /// Converts an Emgucv Image object to an IplImage pointer;
        /// </summary>
        /// <typeparam name="TColor">Color type of this image (either Gray, Bgr, Bgra, Hsv, Hls, Lab, Luv, Xyz or Ycc)</typeparam>
        /// <typeparam name="TDepth">Depth of this image (either Byte, SByte, Single, double, UInt16, Int16 or Int32)</typeparam>
        /// <param name="image">Image object</param>
        /// <returns>the IplImage pointer</returns>
        public static IntPtr EmgucvImageToIplImagePointer<TColor, TDepth>(Image<TColor, TDepth> image)
            where TColor : struct, IColor
            where TDepth : new()
        {
            return image.Ptr;
        }

        #endregion

        #region  Image<TColor,TDepth>  <=>  Bitmap
        //EmguCv already implements the conversion between these two, through the following members of the Image<TColor,TDepth> class:
        //(1) public Bitmap Bitmap { get; set; }
        //    This property gets or sets the bitmap; for Image<Gray, Byte>, Image<Bgr, Byte> and Image<Bgra, Byte> it is very efficient, because Image<TColor,TDepth> and the Bitmap share the same data memory.
        //(2) public Bitmap ToBitmap(int width, int height) and public Bitmap ToBitmap()
        //(3) public Image(Bitmap bmp)
        //(4) public Image(int width, int height, int stride, IntPtr scan0)
        //    This constructor can handle almost anything, as long as you know the memory layout of the image and what you are trying to achieve.
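        //For example (an illustrative sketch, not part of the original sample):
        //    Image<Bgr, byte> img = new Image<Bgr, byte>("00.jpg");
        //    Bitmap bmp = img.ToBitmap();                           // Image<TColor,TDepth> -> Bitmap
        //    Image<Bgr, byte> back = new Image<Bgr, byte>(bmp);     // Bitmap -> Image<TColor,TDepth>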

        /// <summary>
        /// Converts an IplImage pointer to a Bitmap object;
        /// for unsupported pixel formats, first convert the image to a supported one with cvCvtColor
        /// </summary>
        /// <param name="ptr">IplImage pointer</param>
        /// <returns>the Bitmap object</returns>
        public static Bitmap IplImagePointerToBitmap(IntPtr ptr)
        {
            MIplImage mi = IplImagePointerToMIplImage(ptr);
            PixelFormat pixelFormat;    //pixel format
            string unsupportedDepth = "Unsupported pixel depth IPL_DEPTH";
            string unsupportedChannels = "Unsupported number of channels (only 1, 3 and 4 channels are supported)";
            switch (mi.NChannels)
            {
                case 1:
                    switch (mi.Depth)
                    {
                        case Emgu.CV.CvEnum.IplDepth.IplDepth_8U:
                            pixelFormat = PixelFormat.Format8bppIndexed;
                            break;
                        case Emgu.CV.CvEnum.IplDepth.IplDepth16U:
                            pixelFormat = PixelFormat.Format16bppGrayScale;
                            break;
                        default:
                            throw new NotImplementedException(unsupportedDepth);
                    }
                    break;
                case 3:
                    switch (mi.Depth)
                    {
                        case Emgu.CV.CvEnum.IplDepth.IplDepth_8U:
                            pixelFormat = PixelFormat.Format24bppRgb;
                            break;
                        case Emgu.CV.CvEnum.IplDepth.IplDepth16U:
                            pixelFormat = PixelFormat.Format48bppRgb;
                            break;
                        default:
                            throw new NotImplementedException(unsupportedDepth);
                    }
                    break;
                case 4:
                    switch (mi.Depth)
                    {
                        case Emgu.CV.CvEnum.IplDepth.IplDepth_8U:
                            pixelFormat = PixelFormat.Format32bppArgb;
                            break;
                        case Emgu.CV.CvEnum.IplDepth.IplDepth16U:
                            pixelFormat = PixelFormat.Format64bppArgb;
                            break;
                        default:
                            throw new NotImplementedException(unsupportedDepth);
                    }
                    break;
                default:
                    throw new NotImplementedException(unsupportedChannels);
            }
            Bitmap bitmap = new Bitmap(mi.Width, mi.Height, mi.WidthStep, pixelFormat, mi.ImageData);
            //for grayscale images, the palette must also be adjusted
            if (pixelFormat == PixelFormat.Format8bppIndexed)
                SetColorPaletteOfGrayscaleBitmap(bitmap);
            return bitmap;
        }
        /// <summary>
        /// Converts a Bitmap to an IplImage pointer
        /// </summary>
        /// <param name="bitmap">Bitmap object</param>
        /// <returns>the IplImage pointer</returns>
        public static IntPtr BitmapToIplImagePointer(Bitmap bitmap)
        {
            IImage iimage = null;
            switch (bitmap.PixelFormat)
            {
                case PixelFormat.Format8bppIndexed:
                    iimage = new Image<Gray, Byte>(bitmap);
                    break;
                case PixelFormat.Format16bppGrayScale:
                    iimage = new Image<Gray, UInt16>(bitmap);
                    break;
                case PixelFormat.Format24bppRgb:
                    iimage = new Image<Bgr, Byte>(bitmap);
                    break;
                case PixelFormat.Format32bppArgb:
                    iimage = new Image<Bgra, Byte>(bitmap);
                    break;
                case PixelFormat.Format48bppRgb:
                    iimage = new Image<Bgr, UInt16>(bitmap);
                    break;
                case PixelFormat.Format64bppArgb:
                    iimage = new Image<Bgra, UInt16>(bitmap);
                    break;
                default:
                    Image<Bgra, Byte> tmp1 = new Image<Bgra, Byte>(bitmap.Size);
                    Byte[,,] data = tmp1.Data;
                    for (int i = 0; i < bitmap.Width; i++)
                    {
                        for (int j = 0; j < bitmap.Height; j++)
                        {
                            Color color = bitmap.GetPixel(i, j);
                            data[j, i, 0] = color.B;
                            data[j, i, 1] = color.G;
                            data[j, i, 2] = color.R;
                            data[j, i, 3] = color.A;
                        }
                    }
                    iimage = tmp1;
                    break;
            }
            return iimage.Ptr;
        }
        /// <summary>
        /// Sets the palette of a 256-level grayscale bitmap
        /// </summary>
        /// <param name="bitmap"></param>
        public static void SetColorPaletteOfGrayscaleBitmap(Bitmap bitmap)
        {
            PixelFormat pixelFormat = bitmap.PixelFormat;
            if (pixelFormat == PixelFormat.Format8bppIndexed)
            {
                ColorPalette palette = bitmap.Palette;
                for (int i = 0; i < palette.Entries.Length; i++)
                    palette.Entries[i] = Color.FromArgb(255, i, i, i);
                bitmap.Palette = palette;
            }
        }
        /// <summary>
        /// Converts an EmguCV Mat image to an IplImage pointer
        /// </summary>
        /// <param name="srcImg">Mat object</param>
        /// <returns>the IplImage pointer</returns>
        public static IntPtr EmguCVMatToIplImagePointer(Mat srcImg)
        {
            return EmgucvImageToIplImagePointer<Bgr, byte>(srcImg.ToImage<Bgr, byte>());
        }
        /// <summary>
        /// Converts an IplImage pointer to an EmguCV Mat image
        /// </summary>
        /// <param name="ptr"></param>
        /// <returns></returns>
        public static Mat IplImagePointerToEmgucvMat(IntPtr ptr)
        {
            Mat dstImg = new Mat();
            Image<Bgr, byte> imgTmp = IplImagePointerToEmgucvImage<Bgr, byte>(ptr);
            CvInvoke.BitwiseAnd(imgTmp, imgTmp, dstImg);
            return dstImg;
        }
        #endregion

        private void timer1_Tick(object sender, EventArgs e)
        {
            //lbl_time.Text = DateTime.Now.ToString("dd hh:mm:ss :fff");
        }

        private void button2_Click(object sender, EventArgs e)
        {
            Thread t = new Thread(MatDeal);
            t.IsBackground = true;
            t.Start();
        }
    }
}

To call an OpenCV DLL written in C++ from C#, you can use Platform Invoke (P/Invoke). Below is a simple example of how to do it:

1. Create a new C# console application.
2. Create a subfolder named "opencv" in the project folder.
3. Copy the OpenCV DLL files into that subfolder.
4. Open the project in Visual Studio and add the following code:

using System;
using System.Runtime.InteropServices;

class Program
{
    [DllImport("opencv\\opencv_core320.dll", CallingConvention = CallingConvention.Cdecl)]
    public static extern IntPtr cvCreateImage(
        [MarshalAs(UnmanagedType.Struct)] CvSize size, int depth, int channels);

    static void Main(string[] args)
    {
        // Create a 256x256 8-bit single-channel image
        var size = new CvSize(256, 256);
        var image = cvCreateImage(size, 8, 1);

        // Print the image information to the console
        Console.WriteLine("Image created: {0}x{1}, depth={2}, channels={3}",
            size.Width, size.Height, 8, 1);
        Console.ReadKey();
    }
}

[StructLayout(LayoutKind.Sequential)]
public struct CvSize
{
    public int Width;
    public int Height;

    public CvSize(int width, int height)
    {
        Width = width;
        Height = height;
    }
}

The code above creates a 256x256 8-bit single-channel image and prints its information to the console.

The DllImport attribute specifies the name of the OpenCV DLL to import and the calling convention; this example uses the Cdecl calling convention.

A CvSize struct is also defined to pass the image-size parameter. In C++, CvSize is defined in OpenCV's core headers; it is redefined here in C# so that it can be used as a parameter.

Note that because C++ and C# use different memory-management mechanisms, pointers returned from C++ must be handled correctly on the C# side. In this example, IntPtr represents the pointer returned from C++, and the methods of the Marshal class can be used to manage it.