
The Interaction of OpenCv, EmguCv and .net

http://www.cnblogs.com/xrwang/archive/2010/01/26/TheInteractionOfOpenCv-EmguCvANDDotNet.html

Preface

When using OpenCv and EmguCv in .net, you must convert between the image formats supported by the three. In .net an image is held by the Bitmap class, in OpenCv by an IplImage pointer, and in EmguCv by Image&lt;TColor,TDepth&gt;. This article mainly describes how to convert between IplImage, Image&lt;TColor,TDepth&gt; and Bitmap.

IplImage &lt;=&gt; MIplImage

MIplImage is the managed counterpart of IplImage and serves as the bridge between .net and OpenCv. Converting between an IplImage pointer and MIplImage mainly relies on the static methods PtrToStructure, StructureToPtr, AllocHGlobal and FreeHGlobal of the Marshal class.

Note that you cannot use constructs such as MIplImage* pmi = (MIplImage*)ptr.ToPointer(); or IntPtr ptr = &mi;.
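The helper IplImagePointerToMIplImage that the code below calls is not reproduced in this post, so here is a minimal sketch of what it (and its reverse) might look like, based only on the Marshal methods named above. It assumes an Emgu CV 2.x-style MIplImage structure from Emgu.CV.Structure; the containing class name IplImageInterop is made up for illustration.

using System;
using System.Runtime.InteropServices;
using Emgu.CV.Structure;

public static class IplImageInterop
{
    // Read the unmanaged IplImage header into a managed MIplImage structure.
    public static MIplImage IplImagePointerToMIplImage(IntPtr ptr)
    {
        return (MIplImage)Marshal.PtrToStructure(ptr, typeof(MIplImage));
    }

    // Copy a managed MIplImage structure into newly allocated unmanaged memory and
    // return the resulting IplImage pointer. The caller must release the memory
    // with Marshal.FreeHGlobal when it is no longer needed.
    public static IntPtr MIplImageToIplImagePointer(MIplImage mi)
    {
        IntPtr ptr = Marshal.AllocHGlobal(Marshal.SizeOf(mi));
        Marshal.StructureToPtr(mi, ptr, false);
        return ptr;
    }
}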

IplImage &lt;=&gt; Image&lt;TColor,TDepth&gt;

With the help of MIplImage, we can easily convert between an IplImage pointer and Image&lt;TColor,TDepth&gt;.

/// <summary>
/// Convert an IplImage pointer into an Emgucv Image object.
/// Note: you must choose TColor and TDepth yourself according to the depth and nChannels of the IplImage.
/// </summary>
/// <typeparam name="TColor">Color type of this image (either Gray, Bgr, Bgra, Hsv, Hls, Lab, Luv, Xyz or Ycc)</typeparam>
/// <typeparam name="TDepth">Depth of this image (either Byte, SByte, Single, double, UInt16, Int16 or Int32)</typeparam>
/// <param name="ptr">IplImage pointer</param>
/// <returns>Returns the Image object</returns>
public static Image<TColor, TDepth> IplImagePointerToEmgucvImage<TColor, TDepth>(IntPtr ptr)
    where TColor : struct, IColor
    where TDepth : new()
{
    MIplImage mi = IplImagePointerToMIplImage(ptr);
    return new Image<TColor, TDepth>(mi.width, mi.height, mi.widthStep, mi.imageData);
}

/// <summary>
/// Convert an IplImage pointer into an Emgucv IImage interface.
/// 1 channel maps to a grayscale image, 3 channels to a BGR image, 4 channels to a BGRA image.
/// Note: a 3-channel image is not necessarily BGR; it could be HLS, HSV, etc.
/// </summary>
/// <param name="ptr">IplImage pointer</param>
/// <returns>Returns the IImage interface</returns>
public static IImage IplImagePointToEmgucvIImage(IntPtr ptr)
{
    MIplImage mi = IplImagePointerToMIplImage(ptr);
    Type tColor;
    Type tDepth;
    string unsupportedDepth = "Unsupported pixel depth IPL_DEPTH";
    string unsupportedChannels = "Unsupported number of channels (only 1, 3 and 4 channels are supported)";
    switch (mi.nChannels)
    {
        case 1:
            tColor = typeof(Gray);
            switch (mi.depth)
            {
                case IPL_DEPTH.IPL_DEPTH_8U:
                    tDepth = typeof(Byte);
                    return new Image<Gray, Byte>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_16U:
                    tDepth = typeof(UInt16);
                    return new Image<Gray, UInt16>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_16S:
                    tDepth = typeof(Int16);
                    return new Image<Gray, Int16>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_32S:
                    tDepth = typeof(Int32);
                    return new Image<Gray, Int32>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_32F:
                    tDepth = typeof(Single);
                    return new Image<Gray, Single>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_64F:
                    tDepth = typeof(Double);
                    return new Image<Gray, Double>(mi.width, mi.height, mi.widthStep, mi.imageData);
                default:
                    throw new NotImplementedException(unsupportedDepth);
            }
        case 3:
            tColor = typeof(Bgr);
            switch (mi.depth)
            {
                case IPL_DEPTH.IPL_DEPTH_8U:
                    tDepth = typeof(Byte);
                    return new Image<Bgr, Byte>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_16U:
                    tDepth = typeof(UInt16);
                    return new Image<Bgr, UInt16>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_16S:
                    tDepth = typeof(Int16);
                    return new Image<Bgr, Int16>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_32S:
                    tDepth = typeof(Int32);
                    return new Image<Bgr, Int32>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_32F:
                    tDepth = typeof(Single);
                    return new Image<Bgr, Single>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_64F:
                    tDepth = typeof(Double);
                    return new Image<Bgr, Double>(mi.width, mi.height, mi.widthStep, mi.imageData);
                default:
                    throw new NotImplementedException(unsupportedDepth);
            }
        case 4:
            tColor = typeof(Bgra);
            switch (mi.depth)
            {
                case IPL_DEPTH.IPL_DEPTH_8U:
                    tDepth = typeof(Byte);
                    return new Image<Bgra, Byte>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_16U:
                    tDepth = typeof(UInt16);
                    return new Image<Bgra, UInt16>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_16S:
                    tDepth = typeof(Int16);
                    return new Image<Bgra, Int16>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_32S:
                    tDepth = typeof(Int32);
                    return new Image<Bgra, Int32>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_32F:
                    tDepth = typeof(Single);
                    return new Image<Bgra, Single>(mi.width, mi.height, mi.widthStep, mi.imageData);
                case IPL_DEPTH.IPL_DEPTH_64F:
                    tDepth = typeof(Double);
                    return new Image<Bgra, Double>(mi.width, mi.height, mi.widthStep, mi.imageData);
                default:
                    throw new NotImplementedException(unsupportedDepth);
            }
        default:
            throw new NotImplementedException(unsupportedChannels);
    }
}

/// <summary>
/// Convert an Emgucv Image object into an IplImage pointer.
/// </summary>
/// <typeparam name="TColor">Color type of this image (either Gray, Bgr, Bgra, Hsv, Hls, Lab, Luv, Xyz or Ycc)</typeparam>
/// <typeparam name="TDepth">Depth of this image (either Byte, SByte, Single, double, UInt16, Int16 or Int32)</typeparam>
/// <param name="image">Image object</param>
/// <returns>Returns the IplImage pointer</returns>
public static IntPtr EmgucvImageToIplImagePointer<TColor, TDepth>(Image<TColor, TDepth> image)
    where TColor : struct, IColor
    where TDepth : new()
{
    return image.Ptr;   // Pointers such as IplImage* and CvCapture* are all represented by IntPtr in C#; there is no cvGetMCvSize function, so cvGetImageROI is used as a temporary substitute.
}
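A brief usage sketch of these helpers follows. It is not from the original post: the file name "test.jpg" is a placeholder, and the calls assume the methods above are in scope (e.g. in the same class).

Image<Bgr, Byte> source = new Image<Bgr, Byte>("test.jpg");
IntPtr iplPtr = EmgucvImageToIplImagePointer(source);            // exposes the underlying IplImage*, no copy; keep source alive while iplPtr is in use
Image<Bgr, Byte> typed = IplImagePointerToEmgucvImage<Bgr, Byte>(iplPtr);
IImage generic = IplImagePointToEmgucvIImage(iplPtr);            // channel count and depth decided at run time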
Image&lt;TColor,TDepth&gt; &lt;=&gt; Bitmap

EmguCv already implements the conversion between these two, via the following members of the Image&lt;TColor,TDepth&gt; class:

(1) public Bitmap Bitmap { get; set; }

This property gets or sets the bitmap. For Image&lt;Gray,Byte&gt;, Image&lt;Bgr,Byte&gt; and Image&lt;Bgra,Byte&gt; it is very efficient, because the Image&lt;TColor,TDepth&gt; and the Bitmap share the same data memory.

(2) The public Bitmap ToBitmap(int width, int height) and public Bitmap ToBitmap() methods

(3) public Image(Bitmap bmp)

(4) public Image(int width, int height, int stride, IntPtr scan0)

This constructor is almost universally applicable, as long as you understand the memory layout of the image and what you want to achieve; a short usage sketch of these members is shown below.
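The following is a minimal sketch, not from the original post, showing the four members above in use. It assumes the usual System.Drawing, System.Drawing.Imaging and Emgu.CV usings; the file name "test.jpg" is a placeholder.

Image<Bgr, Byte> image = new Image<Bgr, Byte>("test.jpg");
Bitmap viaProperty = image.Bitmap;              // (1) shares memory for Byte-depth Gray/Bgr/Bgra images
Bitmap viaToBitmap = image.ToBitmap();          // (2) copy at the original size
Bitmap scaled = image.ToBitmap(320, 240);       // (2) copy resized to 320x240
Image<Bgr, Byte> fromBitmap = new Image<Bgr, Byte>(viaToBitmap);    // (3)

// (4) Wrap raw memory directly, e.g. the scan lines of a locked Bitmap.
//     The wrapped image is only valid while the bitmap stays locked, so copy before unlocking.
BitmapData bd = viaToBitmap.LockBits(new Rectangle(0, 0, viaToBitmap.Width, viaToBitmap.Height),
                                     ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
Image<Bgr, Byte> wrapped = new Image<Bgr, Byte>(bd.Width, bd.Height, bd.Stride, bd.Scan0);
Image<Bgr, Byte> copy = wrapped.Copy();
viaToBitmap.UnlockBits(bd);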


IplImage &lt;=&gt; Bitmap

There are two ways to convert between an IplImage pointer and a Bitmap. The first is to use Image&lt;TColor,TDepth&gt; as an intermediary; the second is to write the conversion yourself, for example the following code I wrote:

/// <summary>
/// Convert an IplImage pointer into a Bitmap object.
/// For unsupported pixel formats, first use the cvCvtColor function to convert to a supported image pointer.
/// </summary>
/// <param name="ptr">IplImage pointer</param>
/// <returns>Returns the Bitmap object</returns>
public static Bitmap IplImagePointerToBitmap(IntPtr ptr)
{
    MIplImage mi = IplImagePointerToMIplImage(ptr);
    PixelFormat pixelFormat;    // pixel format
    string unsupportedDepth = "Unsupported pixel depth IPL_DEPTH";
    string unsupportedChannels = "Unsupported number of channels (only 1, 3 and 4 channels are supported)";
    switch (mi.nChannels)
    {
        case 1:
            switch (mi.depth)
            {
                case IPL_DEPTH.IPL_DEPTH_8U:
                    pixelFormat = PixelFormat.Format8bppIndexed;
                    break;
                case IPL_DEPTH.IPL_DEPTH_16U:
                    pixelFormat = PixelFormat.Format16bppGrayScale;
                    break;
                default:
                    throw new NotImplementedException(unsupportedDepth);
            }
            break;
        case 3:
            switch (mi.depth)
            {
                case IPL_DEPTH.IPL_DEPTH_8U:
                    pixelFormat = PixelFormat.Format24bppRgb;
                    break;
                case IPL_DEPTH.IPL_DEPTH_16U:
                    pixelFormat = PixelFormat.Format48bppRgb;
                    break;
                default:
                    throw new NotImplementedException(unsupportedDepth);
            }
            break;
        case 4:
            switch (mi.depth)
            {
                case IPL_DEPTH.IPL_DEPTH_8U:
                    pixelFormat = PixelFormat.Format32bppArgb;
                    break;
                case IPL_DEPTH.IPL_DEPTH_16U:
                    pixelFormat = PixelFormat.Format64bppArgb;
                    break;
                default:
                    throw new NotImplementedException(unsupportedDepth);
            }
            break;
        default:
            throw new NotImplementedException(unsupportedChannels);
    }
    Bitmap bitmap = new Bitmap(mi.width, mi.height, mi.widthStep, pixelFormat, mi.imageData);
    // For grayscale images the palette must also be adjusted.
    if (pixelFormat == PixelFormat.Format8bppIndexed)
        SetColorPaletteOfGrayscaleBitmap(bitmap);
    return bitmap;
}

/// <summary>
/// Convert a Bitmap into an IplImage pointer.
/// </summary>
/// <param name="bitmap">Bitmap object</param>
/// <returns>Returns the IplImage pointer</returns>
public static IntPtr BitmapToIplImagePointer(Bitmap bitmap)
{
    IImage iimage = null;
    switch (bitmap.PixelFormat)
    {
        case PixelFormat.Format8bppIndexed:
            iimage = new Image<Gray, Byte>(bitmap);
            break;
        case PixelFormat.Format16bppGrayScale:
            iimage = new Image<Gray, UInt16>(bitmap);
            break;
        case PixelFormat.Format24bppRgb:
            iimage = new Image<Bgr, Byte>(bitmap);
            break;
        case PixelFormat.Format32bppArgb:
            iimage = new Image<Bgra, Byte>(bitmap);
            break;
        case PixelFormat.Format48bppRgb:
            iimage = new Image<Bgr, UInt16>(bitmap);
            break;
        case PixelFormat.Format64bppArgb:
            iimage = new Image<Bgra, UInt16>(bitmap);
            break;
        default:
            // For other pixel formats, fall back to a slow per-pixel copy into a BGRA image.
            Image<Bgra, Byte> tmp1 = new Image<Bgra, Byte>(bitmap.Size);
            Byte[,,] data = tmp1.Data;
            for (int i = 0; i < bitmap.Width; i++)
            {
                for (int j = 0; j < bitmap.Height; j++)
                {
                    Color color = bitmap.GetPixel(i, j);
                    data[j, i, 0] = color.B;
                    data[j, i, 1] = color.G;
                    data[j, i, 2] = color.R;
                    data[j, i, 3] = color.A;
                }
            }
            iimage = tmp1;
            break;
    }
    return iimage.Ptr;
}

/// <summary>
/// Set the palette of a 256-level grayscale bitmap.
/// </summary>
/// <param name="bitmap"></param>
public static void SetColorPaletteOfGrayscaleBitmap(Bitmap bitmap)
{
    PixelFormat pixelFormat = bitmap.PixelFormat;
    if (pixelFormat == PixelFormat.Format8bppIndexed)
    {
        ColorPalette palette = bitmap.Palette;
        for (int i = 0; i < palette.Entries.Length; i++)
            palette.Entries[i] = Color.FromArgb(255, i, i, i);
        bitmap.Palette = palette;
    }
}
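The first approach mentioned above, going through Image&lt;TColor,TDepth&gt; as the intermediary, can be sketched as follows. This is a minimal sketch, not from the original post: it assumes an 8-bit BGR IplImage, and iplImagePtr is a placeholder pointer.

// IplImage* -> Bitmap, via Image<Bgr, Byte>
Image<Bgr, Byte> intermediate = IplImagePointerToEmgucvImage<Bgr, Byte>(iplImagePtr);
Bitmap bmp = intermediate.Bitmap;               // shares memory for Byte-depth Gray/Bgr/Bgra images

// Bitmap -> IplImage*, via Image<Bgr, Byte>
Image<Bgr, Byte> fromBitmap = new Image<Bgr, Byte>(bmp);
IntPtr ptr = fromBitmap.Ptr;                    // keep fromBitmap alive while ptr is in use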