Some Emgu CV code I have accumulated (covering image format conversion, image cropping, image flipping, image rotation and image translation)

2023-07-29

using System;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;

namespace ZNLGIS
{
public class ImageClass
{
//Crop an image to the given rectangle
public static Image<Bgr, Byte> Cut(Image<Bgr, Byte> image, Rectangle rectangle)
{
//The destination image must match the size of the crop rectangle
System.Drawing.Size roisize = rectangle.Size;
IntPtr dst = CvInvoke.cvCreateImage(roisize, Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 3);
CvInvoke.cvSetImageROI(image.Ptr, rectangle);
CvInvoke.cvCopy(image.Ptr, dst, IntPtr.Zero);
CvInvoke.cvResetImageROI(image.Ptr);
return OpenCVEmguCVDotNet.IplImagePointerToEmgucvImage<Bgr, Byte>(dst);
}
//Center-crop an image to the given width and height
public static Image<Bgr, Byte> Cut2(Image<Bgr, Byte> image, int oldwidth, int oldheight)
{
int x = image.Width - oldwidth;
int y = image.Height - oldheight;
System.Drawing.Size roisize = new System.Drawing.Size(oldwidth, oldheight); //size of the cropped image
IntPtr dst = CvInvoke.cvCreateImage(roisize, Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 3);
System.Drawing.Rectangle rect = new System.Drawing.Rectangle(x / 2, y / 2, oldwidth, oldheight);
CvInvoke.cvSetImageROI(image.Ptr, rect);
CvInvoke.cvCopy(image.Ptr, dst, IntPtr.Zero);
CvInvoke.cvResetImageROI(image.Ptr);
return OpenCVEmguCVDotNet.IplImagePointerToEmgucvImage<Bgr, Byte>(dst);
}
//Flip an image horizontally or vertically (modifies the image in place)
public static Image<Bgr, Byte> FlipImage(Image<Bgr, Byte> image, bool isHorizontal)
{
if (isHorizontal)
{
CvInvoke.cvFlip(image.Ptr, IntPtr.Zero, FLIP.HORIZONTAL);
}
else
{
CvInvoke.cvFlip(image.Ptr, IntPtr.Zero, FLIP.VERTICAL);
}
return image;
}
//Rotate an image by the given angle (in degrees), clockwise or counter-clockwise
public static Image<Bgr, Byte> RotateImage(Image<Bgr, Byte> image_old, double angle, bool clockwise)
{
IntPtr image_temp;
double anglerad = Math.PI * (angle / 180);
//The destination must be large enough to hold the whole rotated image
int newwidth = (int)Math.Abs(image_old.Bitmap.Height * Math.Sin(anglerad)) +
(int)Math.Abs(image_old.Bitmap.Width * Math.Cos(anglerad)) + 1;
int newheight = (int)Math.Abs(image_old.Bitmap.Height * Math.Cos(anglerad)) +
(int)Math.Abs(image_old.Bitmap.Width * Math.Sin(anglerad)) + 1;
image_temp = CvInvoke.cvCreateImage(new Size(newwidth, newheight), IPL_DEPTH.IPL_DEPTH_8U, 3);
CvInvoke.cvZero(image_temp);
int flag = -1;
if (clockwise)
{
flag = 1;
}
//Build the 2x3 affine matrix: rotation about the centre of the source image
float[] m = new float[6];
int w = image_old.Bitmap.Width;
int h = image_old.Bitmap.Height;
m[0] = (float)Math.Cos(flag * angle * Math.PI / 180);
m[1] = (float)Math.Sin(flag * angle * Math.PI / 180);
m[3] = -m[1];
m[4] = m[0];
m[2] = w * 0.5f;
m[5] = h * 0.5f;
unsafe
{
//Keep the matrix pinned while the native calls use it
fixed (float* pc = m)
{
IntPtr M = CvInvoke.cvMat(2, 3, MAT_DEPTH.CV_32F, new IntPtr(pc));
CvInvoke.cvGetQuadrangleSubPix(image_old.Ptr, image_temp, M);
}
}
return OpenCVEmguCVDotNet.IplImagePointerToEmgucvImage<Bgr, Byte>(image_temp);
}
//Translate (shift) an image by x pixels horizontally and y pixels vertically
public static Image<Bgr, Byte> Py(Image<Bgr, Byte> src, int x, int y)
{
Image<Bgr, Byte> dst = new Image<Bgr, byte>(src.Width, src.Height, new Bgr(Color.Transparent));
int i, j;
int w = src.Width;
int h = src.Height;
if (x >= 0 && y >= 0)
{
for (i = 0; i < w - x; i++)
{
for (j = 0; j < h - y; j++)
{
CvInvoke.cvSet2D(dst.Ptr, j + y, i + x, CvInvoke.cvGet2D(src.Ptr, j, i));
}
}
}
else if (x >= 0 && y < 0)
{
for (i = 0; i < w - x; i++)
{
for (j = -y; j < h; j++)
{
CvInvoke.cvSet2D(dst.Ptr, j + y, i + x, CvInvoke.cvGet2D(src.Ptr, j, i));
}
}
}
else if (x < 0 && y >= 0)
{
for (i = -x; i < w; i++)
{
for (j = 0; j < h - y; j++)
{
CvInvoke.cvSet2D(dst.Ptr, j + y, i + x, CvInvoke.cvGet2D(src.Ptr, j, i));
}
}
}
else
{
for (i = -x; i < w; i++)
{
for (j = -y; j < h; j++)
{
CvInvoke.cvSet2D(dst.Ptr, j + y, i + x, CvInvoke.cvGet2D(src.Ptr, j, i));
}
}
}
return dst;
}
}
}
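
A minimal sketch of how these helpers might be called. The file names, crop sizes and shift amounts below are placeholders, and the sketch assumes the same Emgu CV 2.x binaries referenced above:

using System.Drawing;
using Emgu.CV;
using Emgu.CV.Structure;

//Hypothetical usage of ImageClass; "input.jpg" and the numeric values are placeholders
Image<Bgr, Byte> src = new Image<Bgr, Byte>("input.jpg");
Image<Bgr, Byte> cropped = ImageClass.Cut(src, new Rectangle(10, 10, 260, 380)); //crop a 260x380 region at (10,10)
Image<Bgr, Byte> centered = ImageClass.Cut2(src, 200, 200);                      //center-crop to 200x200
Image<Bgr, Byte> flipped = ImageClass.FlipImage(src.Copy(), true);               //horizontal flip (in place, so copy first)
Image<Bgr, Byte> rotated = ImageClass.RotateImage(src, 30, true);                //rotate 30 degrees clockwise
Image<Bgr, Byte> shifted = ImageClass.Py(src, 50, 20);                           //shift right 50 px and down 20 px
cropped.Save("cropped.jpg");

The ImageClass methods above rely on the following conversion helper class: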
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;

namespace ZNLGIS
{
public class OpenCVEmguCVDotNet
{
/// <summary>
/// Convert an MIplImage structure to an IplImage pointer;
/// note: the pointer must be released with Marshal.FreeHGlobal after use.
/// </summary>
/// <param name="mi">MIplImage object</param>
/// <returns>IplImage pointer</returns>
public static IntPtr MIplImageToIplImagePointer(MIplImage mi)
{
IntPtr ptr = Marshal.AllocHGlobal(mi.nSize);
Marshal.StructureToPtr(mi, ptr, false);
return ptr;
}

/// <summary>
/// Convert an IplImage pointer to an MIplImage structure
/// </summary>
/// <param name="ptr">IplImage pointer</param>
/// <returns>MIplImage structure</returns>
public static MIplImage IplImagePointerToMIplImage(IntPtr ptr)
{
return (MIplImage)Marshal.PtrToStructure(ptr, typeof(MIplImage));
}

/// <summary>
/// Convert an IplImage pointer to an Emgu CV Image object;
/// note: you must choose TColor and TDepth yourself based on the depth and nChannels of the IplImage.
/// </summary>
/// <typeparam name="TColor">Color type of this image (either Gray, Bgr, Bgra, Hsv, Hls, Lab, Luv, Xyz or Ycc)</typeparam>
/// <typeparam name="TDepth">Depth of this image (either Byte, SByte, Single, double, UInt16, Int16 or Int32)</typeparam>
/// <param name="ptr">IplImage pointer</param>
/// <returns>Image object</returns>
public static Image<TColor, TDepth> IplImagePointerToEmgucvImage<TColor, TDepth>(IntPtr ptr)
where TColor : struct, IColor
where TDepth : new()
{
MIplImage mi = IplImagePointerToMIplImage(ptr);
return new Image<TColor, TDepth>(mi.width, mi.height, mi.widthStep, mi.imageData);
}

/// <summary>
/// Convert an IplImage pointer to the Emgu CV IImage interface;
/// 1 channel maps to a grayscale image, 3 channels to a BGR image, 4 channels to a BGRA image.
/// Note: a 3-channel image is not necessarily BGR; it may be HLS, HSV, etc.
/// </summary>
/// <param name="ptr">IplImage pointer</param>
/// <returns>IImage interface</returns>
public static IImage IplImagePointToEmgucvIImage(IntPtr ptr)
{
MIplImage mi = IplImagePointerToMIplImage(ptr);
Type tColor;
Type tDepth;
string unsupportedDepth = "Unsupported IPL_DEPTH pixel depth";
string unsupportedChannels = "Unsupported number of channels (only 1, 3 and 4 channels are supported)";
switch (mi.nChannels)
{
case 1:
tColor = typeof(Gray);
switch (mi.depth)
{
case IPL_DEPTH.IPL_DEPTH_8U:
tDepth = typeof(Byte);
return new Image<Gray, Byte>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_16U:
tDepth = typeof(UInt16);
return new Image<Gray, UInt16>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_16S:
tDepth = typeof(Int16);
return new Image<Gray, Int16>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_32S:
tDepth = typeof(Int32);
return new Image<Gray, Int32>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_32F:
tDepth = typeof(Single);
return new Image<Gray, Single>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_64F:
tDepth = typeof(Double);
return new Image<Gray, Double>(mi.width, mi.height, mi.widthStep, mi.imageData);
default:
throw new NotImplementedException(unsupportedDepth);
}
case 3:
tColor = typeof(Bgr);
switch (mi.depth)
{
case IPL_DEPTH.IPL_DEPTH_8U:
tDepth = typeof(Byte);
return new Image<Bgr, Byte>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_16U:
tDepth = typeof(UInt16);
return new Image<Bgr, UInt16>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_16S:
tDepth = typeof(Int16);
return new Image<Bgr, Int16>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_32S:
tDepth = typeof(Int32);
return new Image<Bgr, Int32>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_32F:
tDepth = typeof(Single);
return new Image<Bgr, Single>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_64F:
tDepth = typeof(Double);
return new Image<Bgr, Double>(mi.width, mi.height, mi.widthStep, mi.imageData);
default:
throw new NotImplementedException(unsupportedDepth);
}
case 4:
tColor = typeof(Bgra);
switch (mi.depth)
{
case IPL_DEPTH.IPL_DEPTH_8U:
tDepth = typeof(Byte);
return new Image<Bgra, Byte>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_16U:
tDepth = typeof(UInt16);
return new Image<Bgra, UInt16>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_16S:
tDepth = typeof(Int16);
return new Image<Bgra, Int16>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_32S:
tDepth = typeof(Int32);
return new Image<Bgra, Int32>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_32F:
tDepth = typeof(Single);
return new Image<Bgra, Single>(mi.width, mi.height, mi.widthStep, mi.imageData);
case IPL_DEPTH.IPL_DEPTH_64F:
tDepth = typeof(Double);
return new Image<Bgra, Double>(mi.width, mi.height, mi.widthStep, mi.imageData);
default:
throw new NotImplementedException(unsupportedDepth);
}
default:
throw new NotImplementedException(unsupportedChannels);
}
}

/// <summary>
/// Convert an Emgu CV Image object to an IplImage pointer
/// </summary>
/// <typeparam name="TColor">Color type of this image (either Gray, Bgr, Bgra, Hsv, Hls, Lab, Luv, Xyz or Ycc)</typeparam>
/// <typeparam name="TDepth">Depth of this image (either Byte, SByte, Single, double, UInt16, Int16 or Int32)</typeparam>
/// <param name="image">Image object</param>
/// <returns>IplImage pointer</returns>
public static IntPtr EmgucvImageToIplImagePointer<TColor, TDepth>(Image<TColor, TDepth> image)
where TColor : struct, IColor
where TDepth : new()
{
return image.Ptr;
}

/// <summary>
/// Convert an IplImage pointer to a Bitmap object;
/// for unsupported pixel formats, first use cvCvtColor to convert to a supported image pointer.
/// </summary>
/// <param name="ptr">IplImage pointer</param>
/// <returns>Bitmap object</returns>
public static Bitmap IplImagePointerToBitmap(IntPtr ptr)
{
MIplImage mi = IplImagePointerToMIplImage(ptr);
PixelFormat pixelFormat; //pixel format
string unsupportedDepth = "Unsupported IPL_DEPTH pixel depth";
string unsupportedChannels = "Unsupported number of channels (only 1, 3 and 4 channels are supported)";
switch (mi.nChannels)
{
case 1:
switch (mi.depth)
{
case IPL_DEPTH.IPL_DEPTH_8U:
pixelFormat = PixelFormat.Format8bppIndexed;
break;
case IPL_DEPTH.IPL_DEPTH_16U:
pixelFormat = PixelFormat.Format16bppGrayScale;
break;
default:
throw new NotImplementedException(unsupportedDepth);
}
break;
case 3:
switch (mi.depth)
{
case IPL_DEPTH.IPL_DEPTH_8U:
pixelFormat = PixelFormat.Format24bppRgb;
break;
case IPL_DEPTH.IPL_DEPTH_16U:
pixelFormat = PixelFormat.Format48bppRgb;
break;
default:
throw new NotImplementedException(unsupportedDepth);
}
break;
case 4:
switch (mi.depth)
{
case IPL_DEPTH.IPL_DEPTH_8U:
pixelFormat = PixelFormat.Format32bppArgb;
break;
case IPL_DEPTH.IPL_DEPTH_16U:
pixelFormat = PixelFormat.Format64bppArgb;
break;
default:
throw new NotImplementedException(unsupportedDepth);
}
break;
default:
throw new NotImplementedException(unsupportedChannels);
}
Bitmap bitmap = new Bitmap(mi.width, mi.height, mi.widthStep, pixelFormat, mi.imageData);
//for grayscale images, the palette must also be set
if (pixelFormat == PixelFormat.Format8bppIndexed)
SetColorPaletteOfGrayscaleBitmap(bitmap);
return bitmap;
}

/// <summary>
/// Convert a Bitmap to an IplImage pointer
/// </summary>
/// <param name="bitmap">Bitmap object</param>
/// <returns>IplImage pointer</returns>
public static IntPtr BitmapToIplImagePointer(Bitmap bitmap)
{
IImage iimage = null;
switch (bitmap.PixelFormat)
{
case PixelFormat.Format8bppIndexed:
iimage = new Image<Gray, Byte>(bitmap);
break;
case PixelFormat.Format16bppGrayScale:
iimage = new Image<Gray, UInt16>(bitmap);
break;
case PixelFormat.Format24bppRgb:
iimage = new Image<Bgr, Byte>(bitmap);
break;
case PixelFormat.Format32bppArgb:
iimage = new Image<Bgra, Byte>(bitmap);
break;
case PixelFormat.Format48bppRgb:
iimage = new Image<Bgr, UInt16>(bitmap);
break;
case PixelFormat.Format64bppArgb:
iimage = new Image<Bgra, UInt16>(bitmap);
break;
default:
Image<Bgra, Byte> tmp1 = new Image<Bgra, Byte>(bitmap.Size);
Byte[, ,] data = tmp1.Data;
for (int i = 0; i < bitmap.Width; i++)
{
for (int j = 0; j < bitmap.Height; j++)
{
Color color = bitmap.GetPixel(i, j);
data[j, i, 0] = color.B;
data[j, i, 1] = color.G;
data[j, i, 2] = color.R;
data[j, i, 3] = color.A;
}
}
iimage = tmp1;
break;
}
return iimage.Ptr;
}

/// <summary>
/// Set the color palette of a 256-level grayscale bitmap
/// </summary>
/// <param name="bitmap">Bitmap to modify</param>
public static void SetColorPaletteOfGrayscaleBitmap(Bitmap bitmap)
{
PixelFormat pixelFormat = bitmap.PixelFormat;
if (pixelFormat == PixelFormat.Format8bppIndexed)
{
ColorPalette palette = bitmap.Palette;
for (int i = 0; i < palette.Entries.Length; i++)
palette.Entries[i] = Color.FromArgb(255, i, i, i);
bitmap.Palette = palette;
}
}
}
}
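
As a rough illustration of the conversion helpers, the sketch below round-trips a Bitmap through an IplImage pointer and back. The file names are placeholders, and the code assumes the same Emgu CV 2.x API used above:

using System;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.Structure;

//Hypothetical round trip: Bitmap -> IplImage pointer -> Image<Bgr, Byte> / Bitmap
Bitmap input = new Bitmap("photo.jpg");                              //"photo.jpg" is a placeholder path
IntPtr ipl = OpenCVEmguCVDotNet.BitmapToIplImagePointer(input);      //wrap the bitmap as an IplImage
Image<Bgr, Byte> emguImage =
OpenCVEmguCVDotNet.IplImagePointerToEmgucvImage<Bgr, Byte>(ipl);     //interpret it as a BGR byte image
Bitmap output = OpenCVEmguCVDotNet.IplImagePointerToBitmap(ipl);     //or go straight back to a Bitmap
output.Save("photo_copy.png");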
