using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Cognex.VisionPro;
using LampInspectionMachine.Interfaces;
using LampInspectionMachine.Log4xml;
using GdiPlus = System.Drawing.Imaging;
using LampInspectionMachine.Model;
using Microsoft.Win32;
using MvCameraControl;
using SciCamera.Net;
using static System.Windows.Forms.AxHost;
using static SciCamera.Net.SciCam;
using MvCamCtrl.NET;

namespace LampInspectionMachine.Cameralibs.OPTCamera
{
    public class MVOptCamera : ICamera, INotifyPropertyChanged
    {
        #region Fields

        private Thread m_hReceiveThread;

        #endregion

        #region Properties

        public CameraBrand CameraBrand { get => CameraBrand.HikRobot_MVS; }

        public string Name { get; private set; }

        public Guid ID { get; private set; }

        public string ManufacturerName { get; private set; }

        public string ModelName { get; private set; }

        public string SerialNumber { get; private set; }

        public CameraType CameraType { get; private set; }

        public SCI_DEVICE_INFO CameraInfo;

        public SciCam.SCI_DEVICE_INFO_LIST m_stDevList = new SciCam.SCI_DEVICE_INFO_LIST();   // ch:设备列表 | en:Device List

        public SciCam m_currentDev = new SciCam();

        public UInt32 ImageWidth { get; private set; }

        public UInt32 ImageHeight { get; private set; }

        public MvGvspPixelType PixelType { get; private set; }

        private bool _IsGrabbing;
        /// <summary>
        /// Whether the camera is currently grabbing.
        /// </summary>
        public bool IsGrabbing
        {
            get { return _IsGrabbing; }
            private set { SetProperty(ref _IsGrabbing, value); }
        }

        private ICogImage _Image;
        public ICogImage Image
        {
            get { return _Image; }
            private set { SetProperty(ref _Image, value); }
        }

        public bool IsConnected { get; private set; }

        private bool IsHaveCamera = false;

        /// <summary>
        /// Time taken by the acquisition.
        /// </summary>
        public TimeSpan TotalTime { get; private set; }

        /// <summary>
        /// Error message.
        /// </summary>
        public string ErrorMessage { get; private set; }

        #endregion

        #region Events

        /// <summary>
        /// Callback event for manually grabbed images.
        /// </summary>
        public event Action<ICogImage, TimeSpan, string> ImageCallbackEvent;

        /// <summary>
        /// Callback event for images acquired on trigger.
        /// </summary>
        public event Action<ICogImage> GrabImageCallbackEvent;

        public event Action<Guid, bool> CameraConnectChangedEvent;

        #endregion

        public MVOptCamera(Guid _ID, string _Name, string _SerialNumber)
        {
            ID = _ID;
            Name = _Name;
            SerialNumber = _SerialNumber;

            uint nReVal = SciCam.DiscoveryDevices(ref m_stDevList,
                (uint)(SciCam.SciCamTLType.SciCam_TLType_Gige) | (uint)(SciCam.SciCamTLType.SciCam_TLType_Usb3));
            if (nReVal != SciCam.SCI_CAMERA_OK)
            {
                throw new Exception("Discovery devices failed!");
            }
            if (m_stDevList.count == 0)
            {
                throw new Exception("Discovery devices Success, but found 0 device.");
            }

            string chDeviceName;
            IsHaveCamera = false;
            foreach (var devInfo in m_stDevList.pDevInfo)
            {
                SciCam.SCI_DEVICE_INFO device = devInfo;
                SciCam.SciCamTLType devTlType = device.tlType;
                SciCam.SciCamDeviceType devType = device.devType;
                if (devTlType == SciCam.SciCamTLType.SciCam_TLType_Usb3)
                {
                    SciCam.SCI_DEVICE_USB3_INFO usbinfo =
                        (SciCam.SCI_DEVICE_USB3_INFO)SciCam.ByteToStruct(device.info.usb3Info, typeof(SciCam.SCI_DEVICE_USB3_INFO));
                    if (!string.IsNullOrEmpty(usbinfo.userDefineName))
                    {
                        chDeviceName = string.Format("{0} [{1}]", usbinfo.modelName, usbinfo.userDefineName);
                    }
                    else
                    {
                        chDeviceName = string.Format("{0} [{1}]", usbinfo.modelName, usbinfo.serialNumber);
                    }

                    if (string.Equals(usbinfo.serialNumber, _SerialNumber))
                    {
                        ManufacturerName = usbinfo.manufactureName;
                        ModelName = usbinfo.modelName;
                        SerialNumber = usbinfo.serialNumber;
                        CameraType = CameraType.USB;
                        CameraInfo = device;
                        IsHaveCamera = true;
                        break;
                    }
                }
            }
        }
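
        // Example (added for illustration, not part of the original source): a caller would typically
        // construct the camera with a known serial number and subscribe to the callback events before
        // opening the device. The serial number below is a placeholder.
        //
        //     var camera = new MVOptCamera(Guid.NewGuid(), "Cam1", "OPT-SN-PLACEHOLDER");
        //     camera.ImageCallbackEvent += (img, time, error) => { /* show img, inspect error */ };
        //     camera.CameraConnectChangedEvent += (id, connected) => LogHelper.Info($"Camera {id} connected: {connected}");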
        #region Methods

        /// <summary>
        /// Enumerate available devices.
        /// </summary>
        /// <returns></returns>
        public static CameraInfo[] GetDevices()
        {
            List<CameraInfo> cameras = new List<CameraInfo>();
            try
            {
                // ch: 初始化 SDK | en: Initialize SDK
                SDKSystem.Initialize();

                List<IDeviceInfo> devInfoList = new List<IDeviceInfo>();

                // ch:枚举设备 | en:Enum device
                int nRet = DeviceEnumerator.EnumDevices(
                    DeviceTLayerType.MvGigEDevice | DeviceTLayerType.MvVirGigEDevice | DeviceTLayerType.MvGenTLGigEDevice |
                    DeviceTLayerType.MvUsbDevice | DeviceTLayerType.MvVirUsbDevice, out devInfoList);
                if (nRet != MvError.MV_OK)
                {
                    throw new Exception($"Enumerate devices fail: {nRet:x8}");
                }

                foreach (var devInfo in devInfoList)
                {
                    if (devInfo.TLayerType == DeviceTLayerType.MvGigEDevice || devInfo.TLayerType == DeviceTLayerType.MvVirGigEDevice
                        || devInfo.TLayerType == DeviceTLayerType.MvGenTLGigEDevice)
                    {
                        IGigEDeviceInfo gigeDevInfo = devInfo as IGigEDeviceInfo;
                        uint nIp1 = ((gigeDevInfo.CurrentIp & 0xff000000) >> 24);
                        uint nIp2 = ((gigeDevInfo.CurrentIp & 0x00ff0000) >> 16);
                        uint nIp3 = ((gigeDevInfo.CurrentIp & 0x0000ff00) >> 8);
                        uint nIp4 = (gigeDevInfo.CurrentIp & 0x000000ff);
                        Console.WriteLine("DevIP: {0}.{1}.{2}.{3}", nIp1, nIp2, nIp3, nIp4);
                        cameras.Add(new CameraInfo()
                        {
                            CameraName = "",
                            CameraBrand = CameraBrand.HikRobot_MVS,
                            CameraType = CameraType.GIGE,
                            Id = Guid.NewGuid(),
                            ManufacturerName = devInfo.ManufacturerName,
                            Model = devInfo.ModelName,
                            SerialNumber = devInfo.SerialNumber,
                            CameraIp = $"{nIp1}.{nIp2}.{nIp3}.{nIp4}",
                        });
                    }
                    else if (devInfo.TLayerType == DeviceTLayerType.MvUsbDevice || devInfo.TLayerType == DeviceTLayerType.MvVirUsbDevice)
                    {
                        cameras.Add(new CameraInfo()
                        {
                            CameraName = "",
                            CameraBrand = CameraBrand.HikRobot_MVS,
                            CameraType = CameraType.USB,
                            Id = Guid.NewGuid(),
                            ManufacturerName = devInfo.ManufacturerName,
                            Model = devInfo.ModelName,
                            SerialNumber = devInfo.SerialNumber,
                            CameraIp = "",
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                // LogHelper.WriteLogError("搜索海康相机列表时出错!", ex);
            }
            return cameras.ToArray();
        }
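
        // Example (added for illustration, not part of the original source): GetDevices() enumerates
        // through the Hikvision MvCameraControl SDK, so a caller would usually pick an entry by serial
        // number and then construct the wrapper from it. "YOUR-SERIAL" is a placeholder.
        //
        //     CameraInfo[] found = MVOptCamera.GetDevices();
        //     CameraInfo target = found.FirstOrDefault(c => c.SerialNumber == "YOUR-SERIAL");
        //     if (target != null)
        //     {
        //         var cam = new MVOptCamera(target.Id, "Cam1", target.SerialNumber);
        //     }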
        /// <summary>
        /// Open the camera.
        /// </summary>
        /// <returns></returns>
        public bool OpenDevice()
        {
            if (!IsHaveCamera)
            {
                SCI_DEVICE_INFO_LIST devInfoList = new SCI_DEVICE_INFO_LIST();

                // ch:枚举设备 | en:Enum device
                uint nReVal = SciCam.DiscoveryDevices(ref devInfoList,
                    (uint)(SciCam.SciCamTLType.SciCam_TLType_Gige) | (uint)(SciCam.SciCamTLType.SciCam_TLType_Usb3));
                if (nReVal != SciCam.SCI_CAMERA_OK)
                {
                    throw new Exception("Discovery devices failed!");
                }

                string chDeviceName;
                foreach (var devInfo in devInfoList.pDevInfo)
                {
                    SciCam.SCI_DEVICE_INFO device = devInfo;
                    SciCam.SciCamTLType devTlType = device.tlType;
                    SciCam.SciCamDeviceType devType = device.devType;
                    if (devTlType == SciCam.SciCamTLType.SciCam_TLType_Usb3)
                    {
                        SciCam.SCI_DEVICE_USB3_INFO usbinfo =
                            (SciCam.SCI_DEVICE_USB3_INFO)SciCam.ByteToStruct(device.info.usb3Info, typeof(SciCam.SCI_DEVICE_USB3_INFO));
                        if (!string.IsNullOrEmpty(usbinfo.userDefineName))
                        {
                            chDeviceName = string.Format("{0} [{1}]", usbinfo.modelName, usbinfo.userDefineName);
                        }
                        else
                        {
                            chDeviceName = string.Format("{0} [{1}]", usbinfo.modelName, usbinfo.serialNumber);
                        }

                        if (string.Equals(usbinfo.serialNumber, SerialNumber))
                        {
                            ManufacturerName = usbinfo.manufactureName;
                            ModelName = usbinfo.modelName;
                            SerialNumber = usbinfo.serialNumber;
                            CameraType = CameraType.USB;
                            CameraInfo = device;
                            IsHaveCamera = true;
                            break;
                        }
                    }
                }

                IsConnected = false;
                if (!IsHaveCamera)
                {
                    throw new Exception("没有发现相机");
                }
            }

            if (!IsConnected)
            {
                uint nReVal = SciCam.SCI_CAMERA_OK;
                // Open the device whose serial number matches, using the list captured at construction time.
                for (int i = 0; i < m_stDevList.count; i++)
                {
                    SciCam.SCI_DEVICE_USB3_INFO usbinfo =
                        (SciCam.SCI_DEVICE_USB3_INFO)SciCam.ByteToStruct(m_stDevList.pDevInfo[i].info.usb3Info, typeof(SciCam.SCI_DEVICE_USB3_INFO));
                    if (SerialNumber == usbinfo.serialNumber)
                    {
                        nReVal = m_currentDev.CreateDevice(ref m_stDevList.pDevInfo[i]);
                        if (nReVal == SciCam.SCI_CAMERA_OK)
                        {
                            nReVal = m_currentDev.OpenDevice();
                            if (nReVal != SciCam.SCI_CAMERA_OK)
                            {
                                IsConnected = false;
                                m_currentDev.DeleteDevice();
                                LogHelper.Info("Open device failed" + nReVal);
                                return false;
                            }
                            IsConnected = true;
                            break;
                        }
                        else
                        {
                            IsConnected = false;
                        }
                    }
                }
            }
            return IsConnected;
        }

        /// <summary>
        /// Close the camera.
        /// </summary>
        public void CloseDevice()
        {
            if (IsGrabbing)
            {
                StopGrabbing();
            }
            if (!IsConnected) return;

            uint nReVal = m_currentDev.CloseDevice();
            if (nReVal != SciCam.SCI_CAMERA_OK)
            {
                LogHelper.Info("Close device failed");
                return;
            }
            nReVal = m_currentDev.DeleteDevice();
            if (nReVal != SciCam.SCI_CAMERA_OK)
            {
                LogHelper.Info("Delete device failed" + nReVal);
            }

            IsGrabbing = false;
            if (IsConnected)
            {
                IsConnected = false;
                CameraConnectChangedEvent?.Invoke(ID, IsConnected);
            }
        }

        // Necessary setup steps before grabbing:
        //private Int32 NecessaryOperBeforeGrab()
        //{
        //    // ch:取图像宽 | en:Get Image Width
        //    IIntValue pcWidth = null;
        //    int nRet = m_MyCamera.Parameters.GetIntValue("Width", out pcWidth);
        //    if (nRet != MvError.MV_OK)
        //    {
        //        return nRet;
        //    }
        //    ImageWidth = (UInt32)pcWidth.CurValue;
        //
        //    // ch:取图像高 | en:Get Image Height
        //    IIntValue pcHeight = null;
        //    nRet = m_MyCamera.Parameters.GetIntValue("Height", out pcHeight);
        //    if (nRet != MvError.MV_OK)
        //    {
        //        return nRet;
        //    }
        //    ImageHeight = (UInt32)pcHeight.CurValue;
        //
        //    // ch:取像素格式 | en:Get Pixel Format
        //    IEnumValue pcPixelFormat = null;
        //    nRet = m_MyCamera.Parameters.GetEnumValue("PixelFormat", out pcPixelFormat);
        //    if (nRet != MvError.MV_OK)
        //    {
        //        return nRet;
        //    }
        //    PixelType = (MvCameraControl.MvGvspPixelType)pcPixelFormat.CurEnumEntry.Value;
        //    return MvError.MV_OK;
        //}

        /// <summary>
        /// Grab a single image.
        /// </summary>
        /// <returns></returns>
        public ICogImage Grab()
        {
            m_currentDev.StartGrabbing();
            uint nReVal = SciCam.SCI_CAMERA_OK;
            IntPtr payload = IntPtr.Zero;
            nReVal = m_currentDev.Grab(ref payload);
            if (nReVal == SciCam.SCI_CAMERA_OK)
            {
                int reVal = GetConvertedInfo(payload);
            }
            m_currentDev.StopGrabbing();
            return Image;
        }

        /// <summary>
        /// Start continuous image acquisition.
        /// </summary>
        public void StartGrabbing()
        {
            if (IsGrabbing) return;
            IsGrabbing = true;
            m_hReceiveThread = new Thread(GetStreamThreadProc) { IsBackground = true };
            m_hReceiveThread.Start();
        }

        /// <summary>
        /// Start image acquisition in trigger mode.
        /// </summary>
        public void StartTriggerGrabbing()
        {
            StartGrabbing();
        }

        /// <summary>
        /// Stop image acquisition.
        /// </summary>
        public void StopGrabbing()
        {
            try
            {
                if (IsGrabbing)
                {
                    IsGrabbing = false;
                    Thread.Sleep(1000);   // give the receive thread time to leave its grab loop
                }
                if (m_hReceiveThread != null)
                {
                    m_hReceiveThread.Abort();
                    m_hReceiveThread = null;
                }
            }
            catch (Exception)
            {
            }
        }

        //private void FrameGrabedEventHandler(object sender, FrameGrabbedEventArgs e)
        //{
        //    GrabImageCallbackEvent?.Invoke(AnalyticImage(e.FrameOut));
        //    //Console.WriteLine("Get one frame: Width[{0}] , Height[{1}] , ImageSize[{2}], FrameNum[{3}]", e.FrameOut.Image.Width, e.FrameOut.Image.Height, e.FrameOut.Image.ImageSize, e.FrameOut.FrameNum);
        //}

        // Documentation left over from the AnalyticImage(IFrameOut) helper referenced by the
        // commented-out FrameGrabedEventHandler above:
        //
        // Converts the image data from the specified IFrameOut object into an ICogImage format.
        // This method processes both color and monochrome images, converting them to a compatible
        // format for further analysis. Unsupported pixel formats are not processed, and the method
        // will return null in such cases. The caller is responsible for ensuring that the frame
        // output parameter is valid and properly initialized.
        // Parameter: the frame output containing the image data to be analyzed and converted.
        // Returns: an ICogImage object representing the converted image, or null if the image
        // format is unsupported or if an error occurs during conversion.
        /// <summary>
        /// Set the exposure time.
        /// </summary>
        /// <param name="ExposureTime">Exposure time value.</param>
        /// <returns>True on success.</returns>
        public bool SetExposureTime(float ExposureTime)
        {
            string[] nodeName = new string[] { "ExposureTime", "ExposureTimeAbs", "ExposureTimeRaw" };
            uint nReVal = SciCam.SCI_CAMERA_OK;
            int iExposure = (int)ExposureTime;
            for (int i = 0; i < nodeName.Count(); i++)
            {
                // Try the integer node first; fall back to the float node if that fails.
                nReVal = m_currentDev.SetIntValue(nodeName[i], iExposure);
                if (nReVal != SciCam.SCI_CAMERA_OK)
                {
                    double dExposure = ExposureTime;
                    nReVal = m_currentDev.SetFloatValue(nodeName[i], dExposure);
                    if (nReVal == SciCam.SCI_CAMERA_OK)
                    {
                        return true;
                    }
                }
                else
                {
                    return true;
                }
            }
            return false;
        }

        /// <summary>
        /// Get the exposure time.
        /// </summary>
        /// <returns>The exposure time, or 0 if it could not be read.</returns>
        public float GetExposureTime()
        {
            string[] nodeName = new string[] { "ExposureTime", "ExposureTimeAbs", "ExposureTimeRaw" };
            uint nReVal = SciCam.SCI_CAMERA_OK;
            for (int i = 0; i < nodeName.Count(); i++)
            {
                SciCam.SCI_NODE_VAL_INT iNodeVal = new SciCam.SCI_NODE_VAL_INT();
                nReVal = m_currentDev.GetIntValueEx(SciCam.SciCamDeviceXmlType.SciCam_DeviceXml_Camera, nodeName[i], ref iNodeVal);
                if (nReVal != SciCam.SCI_CAMERA_OK)
                {
                    SciCam.SCI_NODE_VAL_FLOAT fNodeVal = new SciCam.SCI_NODE_VAL_FLOAT();
                    nReVal = m_currentDev.GetFloatValueEx(SciCam.SciCamDeviceXmlType.SciCam_DeviceXml_Camera, nodeName[i], ref fNodeVal);
                    if (nReVal == SciCam.SCI_CAMERA_OK)
                    {
                        return (float)fNodeVal.dVal;
                    }
                }
                else
                {
                    return iNodeVal.nVal;
                }
            }
            return 0;
        }

        /// <summary>
        /// Set the gain.
        /// </summary>
        /// <param name="Gain">Gain value.</param>
        /// <returns>True on success.</returns>
        public bool SetGain(float Gain)
        {
            uint nReVal = m_currentDev.SetFloatValue("Gain", Gain);
            return nReVal == SciCam.SCI_CAMERA_OK;
        }

        /// <summary>
        /// Get the gain.
        /// </summary>
        /// <returns>The gain, or 0 if it could not be read.</returns>
        public float GetGain()
        {
            SciCam.SCI_NODE_VAL_FLOAT fNodeVal = new SciCam.SCI_NODE_VAL_FLOAT();
            uint nReVal = m_currentDev.GetFloatValueEx(SciCam.SciCamDeviceXmlType.SciCam_DeviceXml_Camera, "Gain", ref fNodeVal);
            if (nReVal == SciCam.SCI_CAMERA_OK)
            {
                return (float)fNodeVal.dVal;
            }
            return 0;
        }

        /// <summary>
        /// ch:获取触发模式 | en:Get Trigger Mode
        /// </summary>
        /// <returns>On/Off. Currently returns true when the TriggerMode node can be read; the On/Off value itself is not evaluated.</returns>
        public bool GetTriggerMode()
        {
            SciCam.SCI_NODE_VAL_ENUM eNodeVal = new SciCam.SCI_NODE_VAL_ENUM();
            uint nReVal = m_currentDev.GetEnumValue("TriggerMode", ref eNodeVal);
            if (nReVal != SciCam.SCI_CAMERA_OK)
            {
                return false;
            }
            return true;
        }

        /// <summary>
        /// Set the trigger mode.
        /// </summary>
        /// <param name="mode">Trigger mode On/Off.</param>
        /// <param name="triggerSource">Trigger source: 0 - Line0; 1 - Line1; 2 - Line2; 3 - Line3; 4 - Counter; 7 - Software;</param>
        /// <returns>True on success.</returns>
        public bool SetTriggerMode(bool mode, int triggerSource)
        {
            string strmode = mode ? "On" : "Off";
            if (m_currentDev.IsDeviceOpen())
            {
                uint nRet = m_currentDev.SetEnumValueByStringEx(SciCam.SciCamDeviceXmlType.SciCam_DeviceXml_Camera, "TriggerMode", strmode);
                if (nRet != SciCam.SCI_CAMERA_OK)
                {
                    return false;
                }
                if (mode)
                {
                    return SetTriggerSource(triggerSource);
                }
                return true;
            }
            return false;
        }
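
        // Example (added for illustration, not part of the original source): typical parameter setup
        // after OpenDevice(), using placeholder values.
        //
        //     camera.SetExposureTime(5000f);            // tries the integer node, then the float node
        //     camera.SetGain(2.0f);
        //     camera.SetTriggerMode(true, 0);           // hardware trigger on Line0
        //     // or: camera.SetTriggerMode(false, 0);   // free-run / continuous acquisition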
        /// <summary>
        /// Sets the trigger source for the camera.
        /// </summary>
        /// <param name="source">0 - Line0; 1 - Line1; 2 - Line2; 3 - Line3; 4 - Counter; 7 - Software;</param>
        /// <returns>True on success.</returns>
        public bool SetTriggerSource(int source)
        {
            // ch:触发源选择:0 - Line0; | en:Trigger source select:0 - Line0;
            //                                                    1 - Line1;
            //                                                    2 - Line2;
            //                                                    3 - Line3;
            //                                                    4 - Counter;
            //                                                    7 - Software;
            string sourceStr;
            switch (source)
            {
                case 0: sourceStr = "Line0"; break;
                case 1: sourceStr = "Line1"; break;
                case 2: sourceStr = "Line2"; break;
                case 3: sourceStr = "Line3"; break;
                case 4: sourceStr = "Counter"; break;
                case 7: sourceStr = "Software"; break;
                default:
                    throw new ArgumentOutOfRangeException(nameof(source), "Invalid trigger source value");
            }

            uint nReVal = m_currentDev.SetEnumValueByStringEx(SciCam.SciCamDeviceXmlType.SciCam_DeviceXml_Camera, "TriggerSource", sourceStr);
            if (SciCam.SCI_CAMERA_OK != nReVal)
            {
                LogHelper.Info("Set TriggerSource to " + sourceStr + " fail! ");
                return false;
            }
            return true;
        }

        /// <summary>
        /// Retrieves the current trigger source setting of the camera.
        /// </summary>
        /// <remarks>
        /// This method queries the camera's parameters to determine the current trigger source.
        /// If the retrieval is unsuccessful or the trigger source is not recognized, the method returns -1.
        /// </remarks>
        /// <returns>
        /// An integer representing the trigger source: 0 for "Line0", 1 for "Line1", 2 for "Line2",
        /// 3 for "Line3", 4 for "Counter", 7 for "Software"; -1 if the trigger source is unknown or
        /// if the retrieval fails.
        /// </returns>
        public int GetTriggerSource()
        {
            SciCam.SCI_NODE_VAL_ENUM enumValue = new SciCam.SCI_NODE_VAL_ENUM();
            uint result = m_currentDev.GetEnumValueEx(SciCam.SciCamDeviceXmlType.SciCam_DeviceXml_Card, "TriggerSource", ref enumValue);
            if (result == SciCam.SCI_CAMERA_OK)
            {
                switch (enumValue.items[0].desc)
                {
                    case "Line0": return 0;
                    case "Line1": return 1;
                    case "Line2": return 2;
                    case "Line3": return 3;
                    case "Counter": return 4;
                    case "Software": return 7;
                    default: return -1;   // unknown trigger source
                }
            }
            return -1;   // read failed
        }
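
        // Example (added for illustration, not part of the original source): software-triggered
        // acquisition. The receive thread must already be running so that each triggered frame is
        // picked up and delivered through GrabImageCallbackEvent.
        //
        //     camera.SetTriggerMode(true, 7);   // 7 - Software
        //     camera.StartTriggerGrabbing();
        //     camera.TriggerSoftware();         // one frame per call, delivered via GrabImageCallbackEvent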
        /// <summary>
        /// Sends a software trigger command to the camera.
        /// </summary>
        /// <remarks>
        /// This method triggers the camera to capture an image or perform an action based on its
        /// current configuration. Ensure the camera is properly initialized and configured to respond
        /// to software triggers before calling this method.
        /// </remarks>
        public void TriggerSoftware()
        {
            // ch:触发软件 | en:Trigger Software
            uint nReVal = m_currentDev.SetCommandValue("TriggerSoftware");
            if (nReVal != SciCam.SCI_CAMERA_OK)
            {
                LogHelper.Info("TriggerSoftware写入失败");
            }
        }

        private void GetStreamThreadProc()
        {
            m_currentDev.StartGrabbing();
            uint nReVal = SciCam.SCI_CAMERA_OK;
            IntPtr payload = IntPtr.Zero;
            if (m_currentDev.IsDeviceOpen())
            {
                while (IsGrabbing)
                {
                    nReVal = m_currentDev.Grab(ref payload);
                    if (nReVal == SciCam.SCI_CAMERA_OK)
                    {
                        int reVal = GetConvertedInfo(payload);
                    }
                    nReVal = m_currentDev.FreePayload(payload);
                }
            }
            IsGrabbing = false;
        }

        private int GetConvertedInfo(IntPtr payload)
        {
            if (payload == IntPtr.Zero)
            {
                return -1;
            }

            SciCam.SCI_CAM_PAYLOAD_ATTRIBUTE payloadAttribute = new SciCam.SCI_CAM_PAYLOAD_ATTRIBUTE();
            uint nReVal = SciCam.PayloadGetAttribute(payload, ref payloadAttribute);
            if (nReVal != SciCam.SCI_CAMERA_OK)
            {
                return -1;
            }

            bool imgIsComplete = payloadAttribute.isComplete;
            SciCam.SciCamPayloadMode payloadMode = payloadAttribute.payloadMode;
            SciCam.SciCamPixelType imgPixelType = payloadAttribute.imgAttr.pixelType;
            ulong imgWidth = payloadAttribute.imgAttr.width;
            ulong imgHeight = payloadAttribute.imgAttr.height;
            ulong framID = payloadAttribute.frameID;
            if (!imgIsComplete || payloadMode != SciCam.SciCamPayloadMode.SciCam_PayloadMode_2D)
            {
                return -1;
            }

            IntPtr imgData = IntPtr.Zero;
            nReVal = SciCam.PayloadGetImage(payload, ref imgData);
            if (nReVal != SciCam.SCI_CAMERA_OK)
            {
                return -1;
            }

            long destImgSize = 0;
            if (imgPixelType == SciCam.SciCamPixelType.Mono1p || imgPixelType == SciCam.SciCamPixelType.Mono2p
                || imgPixelType == SciCam.SciCamPixelType.Mono4p || imgPixelType == SciCam.SciCamPixelType.Mono8s
                || imgPixelType == SciCam.SciCamPixelType.Mono8 || imgPixelType == SciCam.SciCamPixelType.Mono10
                || imgPixelType == SciCam.SciCamPixelType.Mono10p || imgPixelType == SciCam.SciCamPixelType.Mono12
                || imgPixelType == SciCam.SciCamPixelType.Mono12p || imgPixelType == SciCam.SciCamPixelType.Mono14
                || imgPixelType == SciCam.SciCamPixelType.Mono16 || imgPixelType == SciCam.SciCamPixelType.Mono10Packed
                || imgPixelType == SciCam.SciCamPixelType.Mono12Packed || imgPixelType == SciCam.SciCamPixelType.Mono14p)
            {
                // Mono path: convert to Mono8 and wrap the buffer in an 8bpp indexed bitmap.
                // The first call with IntPtr.Zero queries the required destination buffer size.
                nReVal = SciCam.PayloadConvertImageEx(ref payloadAttribute.imgAttr, imgData, SciCam.SciCamPixelType.Mono8,
                    IntPtr.Zero, ref destImgSize, true, 0);
                if (nReVal == SciCam.SCI_CAMERA_OK)
                {
                    IntPtr destImg = Marshal.AllocHGlobal((int)destImgSize);
                    try
                    {
                        nReVal = SciCam.PayloadConvertImageEx(ref payloadAttribute.imgAttr, imgData, SciCam.SciCamPixelType.Mono8,
                            destImg, ref destImgSize, true, 0);
                        if (nReVal == SciCam.SCI_CAMERA_OK)
                        {
                            byte[] bBitmap = new byte[destImgSize];
                            Marshal.Copy(destImg, bBitmap, 0, (int)destImgSize);
                            Bitmap bitMap = new Bitmap((int)imgWidth, (int)imgHeight, GdiPlus.PixelFormat.Format8bppIndexed);
                            GdiPlus.BitmapData bitmapData = bitMap.LockBits(new Rectangle(0, 0, (int)imgWidth, (int)imgHeight),
                                GdiPlus.ImageLockMode.WriteOnly, GdiPlus.PixelFormat.Format8bppIndexed);
                            // Note: this copy assumes the bitmap stride equals the image width.
                            Marshal.Copy(bBitmap, 0, bitmapData.Scan0, (int)destImgSize);
                            bitMap.UnlockBits(bitmapData);

                            // Set a grayscale palette
                            GdiPlus.ColorPalette palette = bitMap.Palette;
                            for (int i = 0; i < 256; i++)
                            {
                                palette.Entries[i] = Color.FromArgb(i, i, i);
                            }
                            bitMap.Palette = palette;

                            // Publish the image
                            Image = new CogImage8Grey(bitMap);
                            ImageCallbackEvent?.Invoke(Image, TotalTime, ErrorMessage);
                            GrabImageCallbackEvent?.Invoke(Image);
                        }
                    }
                    catch (Exception ex)
                    {
                    }
                    finally
                    {
                        Marshal.FreeHGlobal(destImg);
                    }
                }
            }
            else
            {
                // Color path: convert to RGB8 and wrap the buffer in a 24bpp bitmap.
                nReVal = SciCam.PayloadConvertImageEx(ref payloadAttribute.imgAttr, imgData, SciCam.SciCamPixelType.RGB8,
                    IntPtr.Zero, ref destImgSize, true, 0);
                if (nReVal == SciCam.SCI_CAMERA_OK)
                {
                    IntPtr destImg = Marshal.AllocHGlobal((int)destImgSize);
                    try
                    {
                        nReVal = SciCam.PayloadConvertImageEx(ref payloadAttribute.imgAttr, imgData, SciCam.SciCamPixelType.RGB8,
                            destImg, ref destImgSize, true, 0);
                        if (nReVal == SciCam.SCI_CAMERA_OK)
                        {
                            byte[] bBitmap = new byte[destImgSize];
                            Marshal.Copy(destImg, bBitmap, 0, (int)destImgSize);
                            Bitmap bitMap = new Bitmap((int)imgWidth, (int)imgHeight, GdiPlus.PixelFormat.Format24bppRgb);
                            GdiPlus.BitmapData bitmapData = bitMap.LockBits(new Rectangle(0, 0, (int)imgWidth, (int)imgHeight),
                                GdiPlus.ImageLockMode.WriteOnly, GdiPlus.PixelFormat.Format24bppRgb);
                            Marshal.Copy(bBitmap, 0, bitmapData.Scan0, (int)destImgSize);
                            bitMap.UnlockBits(bitmapData);

                            // Publish the image as a VisionPro color image (the bitmap is 24bpp RGB).
                            Image = new CogImage24PlanarColor(bitMap);
                            ImageCallbackEvent?.Invoke(Image, TotalTime, ErrorMessage);
                            GrabImageCallbackEvent?.Invoke(Image);
                        }
                    }
                    catch (Exception ex)
                    {
                    }
                    finally
                    {
                        Marshal.FreeHGlobal(destImg);
                    }
                }
            }
            return 0;
        }
        /// <summary>
        /// Converts raw image data into an <see cref="ICogImage"/> object, supporting both monochrome
        /// and color pixel formats.
        /// </summary>
        /// <remarks>
        /// This method supports both monochrome (PixelType_Gvsp_Mono8) and color pixel formats.
        /// For color images, the method processes the image data as a planar color format.
        /// </remarks>
        /// <param name="nHeight">The height of the image in pixels.</param>
        /// <param name="nWidth">The width of the image in pixels.</param>
        /// <param name="pImageBuf">A pointer to the buffer containing the raw image data.</param>
        /// <param name="enPixelType">The pixel format of the image, specified as a <see cref="MvCameraControl.MvGvspPixelType"/> value.</param>
        /// <returns>
        /// An <see cref="ICogImage"/> object representing the converted image. Returns
        /// <see langword="null"/> if the conversion fails.
        /// </returns>
        private ICogImage ConvertToICogImage(UInt32 nHeight, UInt32 nWidth, IntPtr pImageBuf, MvCameraControl.MvGvspPixelType enPixelType)
        {
            ICogImage cogImage = null;
            // ch:获取步长 | en: Get nRowStep (here: bytes per plane, used as the offset between the R/G/B planes)
            uint m_nRowStep = nWidth * nHeight;
            // ch: 显示 | en: display
            try
            {
                if (enPixelType == MvCameraControl.MvGvspPixelType.PixelType_Gvsp_Mono8)
                {
                    CogImage8Root cogImage8Root = new CogImage8Root();
                    cogImage8Root.Initialize((Int32)nWidth, (Int32)nHeight, pImageBuf, (Int32)nWidth, null);
                    CogImage8Grey cogImage8Grey = new CogImage8Grey();
                    cogImage8Grey.SetRoot(cogImage8Root);
                    cogImage = cogImage8Grey.ScaleImage((int)nWidth, (int)nHeight);
                    System.GC.Collect();
                }
                else
                {
                    // Planar color: three consecutive planes of nWidth * nHeight bytes.
                    CogImage8Root image0 = new CogImage8Root();
                    IntPtr ptr0 = new IntPtr(pImageBuf.ToInt64());
                    image0.Initialize((int)nWidth, (int)nHeight, ptr0, (int)nWidth, null);

                    CogImage8Root image1 = new CogImage8Root();
                    IntPtr ptr1 = new IntPtr(pImageBuf.ToInt64() + m_nRowStep);
                    image1.Initialize((int)nWidth, (int)nHeight, ptr1, (int)nWidth, null);

                    CogImage8Root image2 = new CogImage8Root();
                    IntPtr ptr2 = new IntPtr(pImageBuf.ToInt64() + m_nRowStep * 2);
                    image2.Initialize((int)nWidth, (int)nHeight, ptr2, (int)nWidth, null);

                    CogImage24PlanarColor colorImage = new CogImage24PlanarColor();
                    colorImage.SetRoots(image0, image1, image2);
                    cogImage = colorImage.ScaleImage((int)nWidth, (int)nHeight);
                    System.GC.Collect();
                }
            }
            catch (System.Exception ex)
            {
                ErrorMessage = $"转换ICogImage出错: {ex.Message}";
                return null;
            }
            return cogImage;
        }
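
        // Example (added for illustration, not part of the original source): ConvertToICogImage expects
        // an unmanaged pointer, so a Mono8 buffer held in managed memory would first be pinned. The
        // width/height values below are placeholders; the scaled copy is independent of the pinned buffer.
        //
        //     byte[] raw = ...;   // nWidth * nHeight bytes of Mono8 data
        //     GCHandle handle = GCHandle.Alloc(raw, GCHandleType.Pinned);
        //     try
        //     {
        //         ICogImage img = ConvertToICogImage(1024, 1280, handle.AddrOfPinnedObject(),
        //             MvCameraControl.MvGvspPixelType.PixelType_Gvsp_Mono8);
        //     }
        //     finally { handle.Free(); }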
        /// <summary>
        /// Whether the pixel format is a mono format.
        /// </summary>
        /// <param name="enType"></param>
        /// <returns></returns>
        private bool IsMonoPixelFormat(MvCameraControl.MvGvspPixelType enType)
        {
            switch (enType)
            {
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_Mono8:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_Mono10:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_Mono10_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_Mono12:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_Mono12_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_Mono16:
                    return true;
                default:
                    return false;
            }
        }

        /// <summary>
        /// Whether the pixel format is a color format.
        /// </summary>
        /// <param name="enType"></param>
        /// <returns></returns>
        private bool IsColorPixelFormat(MvCameraControl.MvGvspPixelType enType)
        {
            switch (enType)
            {
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BGR8_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_RGBA8_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BGRA8_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_YUV422_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_YUV422_YUYV_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGR8:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerRG8:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGB8:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerBG8:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerRBGG8:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGB10:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGB10_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerBG10:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerBG10_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerRG10:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerRG10_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGR10:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGR10_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGB12:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGB12_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerBG12:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerBG12_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerRG12:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerRG12_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGR12:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGR12_Packed:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGR16:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerRG16:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerGB16:
                case MvCameraControl.MvGvspPixelType.PixelType_Gvsp_BayerBG16:
                    return true;
                default:
                    return false;
            }
        }

        #endregion

        #region Property change notification

        /// <summary>
        /// Occurs when a property value changes.
        /// </summary>
        public event PropertyChangedEventHandler PropertyChanged;

        /// <summary>
        /// Checks if a property already matches a desired value. Sets the property and
        /// notifies listeners only when necessary.
        /// </summary>
        /// <typeparam name="T">Type of the property.</typeparam>
        /// <param name="storage">Reference to a property with both getter and setter.</param>
        /// <param name="value">Desired value for the property.</param>
        /// <param name="propertyName">Name of the property used to notify listeners. This
        /// value is optional and can be provided automatically when invoked from compilers that
        /// support CallerMemberName.</param>
        /// <returns>True if the value was changed, false if the existing value matched the
        /// desired value.</returns>
        protected virtual bool SetProperty<T>(ref T storage, T value, [CallerMemberName] string propertyName = null)
        {
            if (EqualityComparer<T>.Default.Equals(storage, value)) return false;
            storage = value;
            RaisePropertyChanged(propertyName);
            return true;
        }
        /// <summary>
        /// Checks if a property already matches a desired value. Sets the property and
        /// notifies listeners only when necessary.
        /// </summary>
        /// <typeparam name="T">Type of the property.</typeparam>
        /// <param name="storage">Reference to a property with both getter and setter.</param>
        /// <param name="value">Desired value for the property.</param>
        /// <param name="onChanged">Action that is called after the property value has been changed.</param>
        /// <param name="propertyName">Name of the property used to notify listeners. This
        /// value is optional and can be provided automatically when invoked from compilers that
        /// support CallerMemberName.</param>
        /// <returns>True if the value was changed, false if the existing value matched the
        /// desired value.</returns>
        protected virtual bool SetProperty<T>(ref T storage, T value, Action onChanged, [CallerMemberName] string propertyName = null)
        {
            if (EqualityComparer<T>.Default.Equals(storage, value)) return false;
            storage = value;
            onChanged?.Invoke();
            RaisePropertyChanged(propertyName);
            return true;
        }

        /// <summary>
        /// Raises this object's PropertyChanged event.
        /// </summary>
        /// <param name="propertyName">Name of the property used to notify listeners. This
        /// value is optional and can be provided automatically when invoked from compilers
        /// that support <see cref="CallerMemberNameAttribute"/>.</param>
        protected void RaisePropertyChanged([CallerMemberName] string propertyName = null)
        {
            OnPropertyChanged(new PropertyChangedEventArgs(propertyName));
        }

        /// <summary>
        /// Raises this object's PropertyChanged event.
        /// </summary>
        /// <param name="args">The PropertyChangedEventArgs.</param>
        protected virtual void OnPropertyChanged(PropertyChangedEventArgs args)
        {
            PropertyChanged?.Invoke(this, args);
        }

        public bool CheckGrabImageCallbackEventIsHas(Action<ICogImage> action)
        {
            if (GrabImageCallbackEvent == null) return false;
            return GrabImageCallbackEvent.GetInvocationList().Contains(action);
        }

        public bool CheckImageCallbackEventIsHas(Action<ICogImage, TimeSpan, string> action)
        {
            if (ImageCallbackEvent == null) return false;
            return ImageCallbackEvent.GetInvocationList().Contains(action);
        }

        public bool CheckCameraConnectChangedEventIsHas(Action<Guid, bool> action)
        {
            if (CameraConnectChangedEvent == null) return false;
            return CameraConnectChangedEvent.GetInvocationList().Contains(action);
        }

        #endregion
    }
}
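
// Example (added for illustration, not part of the original source): end-to-end usage sketch of
// MVOptCamera, assuming a valid serial number and that the SciCamera and Cognex runtimes are installed.
//
//     var camera = new MVOptCamera(Guid.NewGuid(), "Cam1", "OPT-SN-PLACEHOLDER");
//     if (camera.OpenDevice())
//     {
//         camera.SetExposureTime(5000f);
//         camera.SetTriggerMode(false, 0);                  // continuous acquisition
//         camera.GrabImageCallbackEvent += img => { /* hand the ICogImage to the inspection pipeline */ };
//         camera.StartGrabbing();
//         // ... run inspection ...
//         camera.StopGrabbing();
//         camera.CloseDevice();
//     }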