合并图像处理库,删除图像lib库

This commit is contained in:
李伟
2026-04-13 13:40:37 +08:00
parent 2a762396d5
commit c7ce4ea6a1
105 changed files with 16341 additions and 133 deletions
+14
View File
@@ -15,6 +15,20 @@
packages/
*.nupkg
[Dd]ebug/
[Rr]elease/
x64/
x86/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
# 重点:过滤lib文件夹
[Ll]ib/
[Ll]og/
[Ll]ogs/
lib/
# 排除 Libs 目录中的 DLL 和 PDB 文件(但保留目录结构)
XplorePlane/Libs/Hardware/*.dll
XplorePlane/Libs/Hardware/*.pdb
Binary file not shown.
+4
View File
@@ -0,0 +1,4 @@
{
"Language": "zh-CN",
"LogLevel": "Debug"
}
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
+106
View File
@@ -0,0 +1,106 @@
"\n"
"General configuration for OpenCV 4.9.0 =====================================\n"
" Version control: 4.9.0-265-g79534d600a\n"
"\n"
" Extra modules:\n"
" Location (extra): G:/bb/cv_x64/build/build_x86_64/../opencv_contrib/modules\n"
" Version control (extra): 4.9.0-66-g61e23082\n"
"\n"
" Platform:\n"
" Timestamp: 2024-04-27T12:51:52Z\n"
" Host: Windows 10.0.22000 AMD64\n"
" CMake: 3.23.0\n"
" CMake generator: Visual Studio 17 2022\n"
" CMake build tool: C:/Program Files/Microsoft Visual Studio/2022/Community/MSBuild/Current/Bin/amd64/MSBuild.exe\n"
" MSVC: 1939\n"
" Configuration: Debug Release MinSizeRel RelWithDebInfo\n"
"\n"
" CPU/HW features:\n"
" Baseline: SSE SSE2 SSE3\n"
" requested: SSE3\n"
"\n"
" C/C++:\n"
" Built as dynamic libs?: NO\n"
" C++ standard: 11\n"
" C++ Compiler: C:/Program Files/Microsoft Visual Studio/2022/Community/VC/Tools/MSVC/14.39.33519/bin/Hostx64/x64/cl.exe (ver 19.39.33523.0)\n"
" C++ flags (Release): /DWIN32 /D_WINDOWS /W4 /GR /D _CRT_SECURE_NO_DEPRECATE /D _CRT_NONSTDC_NO_DEPRECATE /D _SCL_SECURE_NO_WARNINGS /Gy /bigobj /Oi /fp:precise /EHa /wd4127 /wd4251 /wd4324 /wd4275 /wd4512 /wd4589 /wd4819 /MP /MD /O2 /Ob2 /DNDEBUG \n"
" C++ flags (Debug): /DWIN32 /D_WINDOWS /W4 /GR /D _CRT_SECURE_NO_DEPRECATE /D _CRT_NONSTDC_NO_DEPRECATE /D _SCL_SECURE_NO_WARNINGS /Gy /bigobj /Oi /fp:precise /EHa /wd4127 /wd4251 /wd4324 /wd4275 /wd4512 /wd4589 /wd4819 /MP /MDd /Zi /Ob0 /Od /RTC1 \n"
" C Compiler: C:/Program Files/Microsoft Visual Studio/2022/Community/VC/Tools/MSVC/14.39.33519/bin/Hostx64/x64/cl.exe\n"
" C flags (Release): /DWIN32 /D_WINDOWS /W3 /D _CRT_SECURE_NO_DEPRECATE /D _CRT_NONSTDC_NO_DEPRECATE /D _SCL_SECURE_NO_WARNINGS /Gy /bigobj /Oi /fp:precise /MP /MD /O2 /Ob2 /DNDEBUG \n"
" C flags (Debug): /DWIN32 /D_WINDOWS /W3 /D _CRT_SECURE_NO_DEPRECATE /D _CRT_NONSTDC_NO_DEPRECATE /D _SCL_SECURE_NO_WARNINGS /Gy /bigobj /Oi /fp:precise /MP /MDd /Zi /Ob0 /Od /RTC1 \n"
" Linker flags (Release): /machine:x64 /INCREMENTAL:NO \n"
" Linker flags (Debug): /machine:x64 /debug /INCREMENTAL \n"
" ccache: NO\n"
" Precompiled headers: YES\n"
" Extra dependencies: wsock32 comctl32 gdi32 ole32 setupapi ws2_32 G:/bb/cv_x64/build/build_x86_64/install/lib/freetype.lib G:/bb/cv_x64/build/build_x86_64/install/lib/harfbuzz.lib G:/bb/cv_x64/build/build_x86_64/install/lib/libhdf5.lib\n"
" 3rdparty dependencies: libprotobuf ade ittnotify libjpeg-turbo libwebp libpng libtiff libopenjp2 IlmImf zlib\n"
"\n"
" OpenCV modules:\n"
" To be built: alphamat aruco bgsegm bioinspired calib3d ccalib core datasets dnn dnn_objdetect dnn_superres dpm face features2d flann freetype fuzzy gapi hdf hfs highgui img_hash imgcodecs imgproc intensity_transform line_descriptor mcc ml objdetect optflow phase_unwrapping photo plot quality rapid reg rgbd saliency shape stereo stitching structured_light superres surface_matching text tracking ts video videoio videostab wechat_qrcode xfeatures2d ximgproc xobjdetect xphoto\n"
" Disabled: java python_bindings_generator python_tests world\n"
" Disabled by dependency: -\n"
" Unavailable: cannops cudaarithm cudabgsegm cudacodec cudafeatures2d cudafilters cudaimgproc cudalegacy cudaobjdetect cudaoptflow cudastereo cudawarping cudev cvv julia matlab ovis python2 python3 sfm viz\n"
" Applications: perf_tests\n"
" Documentation: NO\n"
" Non-free algorithms: NO\n"
"\n"
" Windows RT support: NO\n"
"\n"
" GUI: WIN32UI\n"
" Win32 UI: YES\n"
" VTK support: NO\n"
"\n"
" Media I/O: \n"
" ZLib: build (ver 1.3)\n"
" JPEG: build-libjpeg-turbo (ver 2.1.3-62)\n"
" SIMD Support Request: YES\n"
" SIMD Support: NO\n"
" WEBP: build (ver encoder: 0x020f)\n"
" PNG: build (ver 1.6.37)\n"
" TIFF: build (ver 42 - 4.2.0)\n"
" JPEG 2000: build (ver 2.5.0)\n"
" OpenEXR: build (ver 2.3.0)\n"
" HDR: YES\n"
" SUNRASTER: YES\n"
" PXM: YES\n"
" PFM: YES\n"
"\n"
" Video I/O:\n"
" DC1394: NO\n"
" FFMPEG: YES (prebuilt binaries)\n"
" avcodec: YES (58.134.100)\n"
" avformat: YES (58.76.100)\n"
" avutil: YES (56.70.100)\n"
" swscale: YES (5.9.100)\n"
" avresample: YES (4.0.0)\n"
" GStreamer: NO\n"
" DirectShow: YES\n"
" Media Foundation: YES\n"
" DXVA: YES\n"
"\n"
" Parallel framework: Concurrency\n"
"\n"
" Trace: YES (with Intel ITT)\n"
"\n"
" Other third-party libraries:\n"
" Eigen: YES (ver 3.4.0)\n"
" Custom HAL: NO\n"
" Protobuf: build (3.19.1)\n"
" Flatbuffers: builtin/3rdparty (23.5.9)\n"
"\n"
" OpenCL: YES (NVD3D11)\n"
" Include path: G:/bb/cv_x64/build/opencv/3rdparty/include/opencl/1.2\n"
" Link libraries: Dynamic load\n"
"\n"
" Python (for build): C:/python-virt/python37/python.exe\n"
"\n"
" Java: \n"
" ant: NO\n"
" Java: YES (ver 1.8.0.202)\n"
" JNI: C:/Program Files/Microsoft/jdk-11.0.16.101-hotspot/include C:/Program Files/Microsoft/jdk-11.0.16.101-hotspot/include/win32 C:/Program Files/Microsoft/jdk-11.0.16.101-hotspot/include\n"
" Java wrappers: NO\n"
" Java tests: NO\n"
"\n"
" Install to: G:/bb/cv_x64/build/build_x86_64/install\n"
"-----------------------------------------------------------------\n"
"\n"
@@ -0,0 +1,29 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0-windows</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UseWPF>true</UseWPF>
</PropertyGroup>
<ItemGroup>
<EmbeddedResource Update="Resources\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
</EmbeddedResource>
<EmbeddedResource Update="Resources\Resources.zh-CN.resx">
<DependentUpon>Resources.resx</DependentUpon>
</EmbeddedResource>
</ItemGroup>
<ItemGroup>
<PackageReference Include="MahApps.Metro" Version="2.4.11" />
<PackageReference Include="MahApps.Metro.IconPacks" Version="6.2.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\ImageProcessing.Core\ImageProcessing.Core.csproj" />
</ItemGroup>
</Project>
@@ -0,0 +1,50 @@
using System.Globalization;
using System.Resources;
namespace ImageProcessing.Controls;
/// <summary>
/// Localization helper that resolves UI strings from this assembly's embedded
/// resources. Uses <see cref="CultureInfo.CurrentUICulture"/> so the language
/// stays in sync with the main ImageProcessing application.
/// </summary>
public static class LocalizationHelper
{
    // Created lazily; construction is cheap and idempotent, so a benign race is acceptable.
    private static ResourceManager? _resourceManager;

    /// <summary>
    /// Resource manager bound to "Resources.resx" embedded in this assembly.
    /// </summary>
    private static ResourceManager ResourceManager =>
        _resourceManager ??= new ResourceManager(
            "ImageProcessing.Controls.Resources.Resources",
            typeof(LocalizationHelper).Assembly);

    /// <summary>
    /// Looks up the localized string for <paramref name="key"/>.
    /// </summary>
    /// <param name="key">Resource key.</param>
    /// <returns>The localized string, or the key itself when no resource is found.</returns>
    public static string GetString(string key)
    {
        try
        {
            // CurrentUICulture is process-wide, so this automatically follows
            // the main project's language selection.
            return ResourceManager.GetString(key, CultureInfo.CurrentUICulture) ?? key;
        }
        catch
        {
            // Missing resource manifest (e.g. design time / tests) — fall back to the key.
            return key;
        }
    }
}
@@ -0,0 +1,38 @@
<UserControl x:Class="ImageProcessing.Controls.ProcessorParameterControl"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
mc:Ignorable="d"
d:DesignHeight="400" d:DesignWidth="380">
<Grid>
<Grid.RowDefinitions>
<RowDefinition Height="Auto" />
<RowDefinition Height="*" />
</Grid.RowDefinitions>
<!-- 算子信息 -->
<Border Grid.Row="0"
Background="Transparent"
BorderBrush="#FFD5DFE5"
BorderThickness="1"
Padding="10"
Margin="0,0,0,10">
<StackPanel>
<TextBlock x:Name="txtProcessorName"
FontSize="14"
FontWeight="Bold" />
<TextBlock x:Name="txtProcessorDescription"
FontSize="12"
Foreground="Gray"
TextWrapping="Wrap"
Margin="0,5,0,0" />
</StackPanel>
</Border>
<!-- 参数列表 -->
<ScrollViewer Grid.Row="1" VerticalScrollBarVisibility="Auto">
<StackPanel x:Name="pnlParameters" Margin="5" />
</ScrollViewer>
</Grid>
</UserControl>
@@ -0,0 +1,377 @@
using ImageProcessing.Core;
using System.Windows;
using System.Windows.Controls;
namespace ImageProcessing.Controls;
/// <summary>
/// Generic parameter-configuration UserControl.
/// Automatically generates matching editor controls (slider, text box, check
/// box, combo box) for the parameters of the loaded image-processing operator.
/// </summary>
public partial class ProcessorParameterControl : UserControl
{
    // Operator currently being edited; null when nothing is selected.
    private ImageProcessorBase? _currentProcessor;

    /// <summary>
    /// Raised whenever any parameter value is changed through the UI.
    /// </summary>
    public event EventHandler? ParameterChanged;

    public ProcessorParameterControl()
    {
        InitializeComponent();
        UpdateNoProcessorText();
    }

    /// <summary>
    /// Shows the localized "no operator selected" placeholder text.
    /// </summary>
    private void UpdateNoProcessorText()
    {
        txtProcessorName.Text = LocalizationHelper.GetString("NoProcessorSelected");
        txtProcessorDescription.Text = LocalizationHelper.GetString("PleaseSelectProcessor");
    }

    /// <summary>
    /// Raises the <see cref="ParameterChanged"/> event.
    /// </summary>
    protected virtual void OnParameterChanged()
    {
        ParameterChanged?.Invoke(this, EventArgs.Empty);
    }

    /// <summary>
    /// Loads the operator's parameters and regenerates the editor UI.
    /// Passing null clears the panel and shows the placeholder text.
    /// </summary>
    public void LoadProcessor(ImageProcessorBase? processor)
    {
        _currentProcessor = processor;
        pnlParameters.Children.Clear();
        if (processor == null)
        {
            UpdateNoProcessorText();
            return;
        }
        // Operator header: name and description.
        txtProcessorName.Text = processor.Name;
        txtProcessorDescription.Text = processor.Description;
        // One editor control per parameter.
        var parameters = processor.GetParameters();
        foreach (var param in parameters)
        {
            CreateParameterControl(param);
        }
    }

    /// <summary>
    /// Creates the editor control that matches the parameter's value type and
    /// adds it (with label and optional description) to the panel.
    /// </summary>
    private void CreateParameterControl(ProcessorParameter param)
    {
        // Hidden parameters get no UI at all.
        if (!param.IsVisible)
        {
            return;
        }
        // Parameter label.
        var label = new TextBlock
        {
            Text = param.DisplayName + ":",
            Margin = new Thickness(0, 10, 0, 5),
            FontWeight = FontWeights.Bold,
            FontSize = 13
        };
        pnlParameters.Children.Add(label);
        // Pick the editor control from the parameter's value type.
        UIElement? control = null;
        if (param.ValueType == typeof(int))
        {
            control = CreateIntegerControl(param);
        }
        else if (param.ValueType == typeof(double) || param.ValueType == typeof(float))
        {
            control = CreateDoubleControl(param);
        }
        else if (param.ValueType == typeof(bool))
        {
            control = CreateBooleanControl(param);
        }
        else if (param.ValueType == typeof(string) && param.Options != null)
        {
            // A string parameter with a fixed option list becomes a combo box.
            control = CreateComboBoxControl(param);
        }
        else if (param.ValueType == typeof(string))
        {
            control = CreateTextBoxControl(param);
        }
        if (control != null)
        {
            pnlParameters.Children.Add(control);
            // Optional description shown in small gray text under the control.
            if (!string.IsNullOrEmpty(param.Description))
            {
                var desc = new TextBlock
                {
                    Text = param.Description,
                    Margin = new Thickness(0, 5, 0, 0),
                    FontSize = 11,
                    Foreground = System.Windows.Media.Brushes.Gray,
                    TextWrapping = TextWrapping.Wrap
                };
                pnlParameters.Children.Add(desc);
            }
        }
    }

    /// <summary>
    /// Creates the editor for an integer parameter (Slider + TextBox, or
    /// TextBox only). When both MinValue and MaxValue are null, only the text
    /// box is shown and no slider is created.
    /// </summary>
    private UIElement CreateIntegerControl(ProcessorParameter param)
    {
        var panel = new StackPanel();
        var textBox = new TextBox
        {
            Text = param.Value.ToString(),
            Width = 100,
            HorizontalAlignment = HorizontalAlignment.Left
        };
        if (param.MinValue != null && param.MaxValue != null)
        {
            var slider = new Slider
            {
                Minimum = Convert.ToDouble(param.MinValue),
                Maximum = Convert.ToDouble(param.MaxValue),
                Value = Convert.ToDouble(param.Value),
                TickFrequency = 1,
                IsSnapToTickEnabled = true,
                Margin = new Thickness(0, 0, 0, 5)
            };
            // The slider is the source of truth: it writes the parameter and
            // mirrors its value into the text box.
            slider.ValueChanged += (s, e) =>
            {
                int value = (int)slider.Value;
                textBox.Text = value.ToString();
                _currentProcessor?.SetParameter(param.Name, value);
                OnParameterChanged();
            };
            // Typing in the text box only moves the slider when the input
            // parses and lies inside the allowed range; the slider's
            // ValueChanged handler then performs the actual parameter update.
            textBox.TextChanged += (s, e) =>
            {
                if (int.TryParse(textBox.Text, out int value))
                {
                    var min = Convert.ToInt32(param.MinValue);
                    var max = Convert.ToInt32(param.MaxValue);
                    if (value >= min && value <= max)
                    {
                        slider.Value = value;
                    }
                }
            };
            panel.Children.Add(slider);
        }
        else
        {
            // No range given: the text box writes the parameter directly.
            textBox.TextChanged += (s, e) =>
            {
                if (int.TryParse(textBox.Text, out int value))
                {
                    _currentProcessor?.SetParameter(param.Name, value);
                    OnParameterChanged();
                }
            };
        }
        panel.Children.Add(textBox);
        return panel;
    }

    /// <summary>
    /// Creates the editor for a floating-point parameter (Slider + TextBox, or
    /// TextBox only). When both MinValue and MaxValue are null, only the text
    /// box is shown and no slider is created.
    /// </summary>
    private UIElement CreateDoubleControl(ProcessorParameter param)
    {
        var panel = new StackPanel();
        var textBox = new TextBox
        {
            Text = Convert.ToDouble(param.Value).ToString("F2"),
            Width = 100,
            HorizontalAlignment = HorizontalAlignment.Left
        };
        if (param.MinValue != null && param.MaxValue != null)
        {
            var slider = new Slider
            {
                Minimum = Convert.ToDouble(param.MinValue),
                Maximum = Convert.ToDouble(param.MaxValue),
                Value = Convert.ToDouble(param.Value),
                TickFrequency = 0.1,
                Margin = new Thickness(0, 0, 0, 5)
            };
            // Slider writes the parameter; values are rounded to 2 decimals
            // to match the "F2" text-box format.
            slider.ValueChanged += (s, e) =>
            {
                double value = Math.Round(slider.Value, 2);
                textBox.Text = value.ToString("F2");
                _currentProcessor?.SetParameter(param.Name, value);
                OnParameterChanged();
            };
            // Text box only moves the slider for in-range parseable input.
            textBox.TextChanged += (s, e) =>
            {
                if (double.TryParse(textBox.Text, out double value))
                {
                    var min = Convert.ToDouble(param.MinValue);
                    var max = Convert.ToDouble(param.MaxValue);
                    if (value >= min && value <= max)
                    {
                        slider.Value = value;
                    }
                }
            };
            panel.Children.Add(slider);
        }
        else
        {
            // No range given: the text box writes the parameter directly.
            textBox.TextChanged += (s, e) =>
            {
                if (double.TryParse(textBox.Text, out double value))
                {
                    _currentProcessor?.SetParameter(param.Name, value);
                    OnParameterChanged();
                }
            };
        }
        panel.Children.Add(textBox);
        return panel;
    }

    /// <summary>
    /// Creates the editor for a boolean parameter (CheckBox).
    /// </summary>
    private UIElement CreateBooleanControl(ProcessorParameter param)
    {
        var checkBox = new CheckBox
        {
            Content = param.DisplayName,
            IsChecked = Convert.ToBoolean(param.Value),
            Margin = new Thickness(0, 5, 0, 0)
        };
        checkBox.Checked += (s, e) =>
        {
            _currentProcessor?.SetParameter(param.Name, true);
            OnParameterChanged();
        };
        checkBox.Unchecked += (s, e) =>
        {
            _currentProcessor?.SetParameter(param.Name, false);
            OnParameterChanged();
        };
        return checkBox;
    }

    /// <summary>
    /// Creates the editor for an option-list parameter (ComboBox).
    /// </summary>
    private UIElement CreateComboBoxControl(ProcessorParameter param)
    {
        var comboBox = new ComboBox
        {
            Margin = new Thickness(0, 5, 0, 0),
            Width = 200,
            HorizontalAlignment = HorizontalAlignment.Left
        };
        if (param.Options != null)
        {
            foreach (var option in param.Options)
            {
                comboBox.Items.Add(option);
            }
        }
        comboBox.SelectedItem = param.Value;
        comboBox.SelectionChanged += (s, e) =>
        {
            if (comboBox.SelectedItem != null)
            {
                _currentProcessor?.SetParameter(param.Name, comboBox.SelectedItem.ToString()!);
                // For the FilterType parameter the whole panel is rebuilt
                // (re-entrant LoadProcessor) so that dependent parameters'
                // visibility is refreshed.
                if (param.Name == "FilterType")
                {
                    LoadProcessor(_currentProcessor);
                }
                OnParameterChanged();
            }
        };
        return comboBox;
    }

    /// <summary>
    /// Creates the editor for a free-text string parameter (TextBox).
    /// </summary>
    private UIElement CreateTextBoxControl(ProcessorParameter param)
    {
        var textBox = new TextBox
        {
            Text = param.Value?.ToString() ?? "",
            Margin = new Thickness(0, 5, 0, 0),
            Width = 200,
            HorizontalAlignment = HorizontalAlignment.Left
        };
        textBox.TextChanged += (s, e) =>
        {
            _currentProcessor?.SetParameter(param.Name, textBox.Text);
            OnParameterChanged();
        };
        return textBox;
    }

    /// <summary>
    /// Returns the operator currently loaded into this control, if any.
    /// </summary>
    public ImageProcessorBase? GetProcessor()
    {
        return _currentProcessor;
    }

    /// <summary>
    /// Clears the loaded operator and all generated parameter controls.
    /// </summary>
    public void Clear()
    {
        _currentProcessor = null;
        pnlParameters.Children.Clear();
        UpdateNoProcessorText();
    }
}
@@ -0,0 +1,69 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<!-- ProcessorParameterControl -->
<data name="NoProcessorSelected" xml:space="preserve">
<value>No Processor Selected</value>
</data>
<data name="PleaseSelectProcessor" xml:space="preserve">
<value>Please select an image processor</value>
</data>
</root>
@@ -0,0 +1,69 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<!-- ProcessorParameterControl -->
<data name="NoProcessorSelected" xml:space="preserve">
<value>未选择算子</value>
</data>
<data name="PleaseSelectProcessor" xml:space="preserve">
<value>请选择一个图像处理算子</value>
</data>
</root>
@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0-windows</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Emgu.CV" Version="4.10.0.5680" />
<PackageReference Include="Emgu.CV.runtime.windows" Version="4.10.0.5680" />
</ItemGroup>
</Project>
+181
View File
@@ -0,0 +1,181 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: ImageProcessorBase.cs
// 描述: 8位图像处理算子基类,定义图像处理算子的通用接口和行为
// 功能:
// - 定义算子的基本属性(名称、描述)
// - 参数管理(设置、获取、验证)
// - ROI(感兴趣区域)处理支持
// - 输出数据管理(用于传递额外信息如轮廓等)
// - 为所有8位图像处理算子提供统一的基础框架
// 设计模式: 模板方法模式
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.CV.Util;
namespace ImageProcessing.Core;
/// <summary>
/// Base class for 8-bit grayscale image-processing operators.
/// Provides parameter management, rectangular / polygonal ROI handling, and an
/// output-data dictionary for passing extra results (contours, masks, ...).
/// Template-method pattern: subclasses implement <see cref="InitializeParameters"/>
/// and <see cref="Process"/>.
/// </summary>
public abstract class ImageProcessorBase
{
    /// <summary>Operator display name.</summary>
    public string Name { get; protected set; } = string.Empty;
    /// <summary>Operator description.</summary>
    public string Description { get; protected set; } = string.Empty;
    /// <summary>Parameter dictionary keyed by parameter name.</summary>
    protected Dictionary<string, ProcessorParameter> Parameters { get; set; }
    /// <summary>Output data used to pass extra information (e.g. contours).</summary>
    public Dictionary<string, object> OutputData { get; protected set; }
    /// <summary>Rectangular region of interest; null or Empty means "whole image".</summary>
    public System.Drawing.Rectangle? ROI { get; set; }
    /// <summary>Polygonal ROI vertices; null or fewer than 3 points disables it.</summary>
    public System.Drawing.Point[]? PolygonROIPoints { get; set; }

    protected ImageProcessorBase()
    {
        Parameters = new Dictionary<string, ProcessorParameter>();
        OutputData = new Dictionary<string, object>();
        // Template-method hook: subclasses register their parameters here.
        InitializeParameters();
    }

    /// <summary>
    /// Registers the operator's parameters (implemented by subclasses).
    /// </summary>
    protected abstract void InitializeParameters();

    /// <summary>
    /// Runs the operator on the whole image (implemented by subclasses).
    /// </summary>
    public abstract Image<Gray, byte> Process(Image<Gray, byte> inputImage);

    /// <summary>
    /// Runs the operator restricted to the rectangular <see cref="ROI"/>.
    /// Pixels outside the ROI are copied through unchanged.
    /// </summary>
    public Image<Gray, byte> ProcessWithROI(Image<Gray, byte> inputImage)
    {
        if (ROI.HasValue && ROI.Value != System.Drawing.Rectangle.Empty)
        {
            inputImage.ROI = ROI.Value;
            // 'using' guarantees the temporaries are released even if Process throws.
            using var roiImage = inputImage.Copy();
            inputImage.ROI = System.Drawing.Rectangle.Empty;
            using var processedROI = Process(roiImage);
            // Record the ROI offset so downstream consumers (e.g. contour
            // drawing) can translate ROI-local coordinates back to image space.
            OutputData["ROIOffset"] = new System.Drawing.Point(ROI.Value.X, ROI.Value.Y);
            var result = inputImage.Clone();
            result.ROI = ROI.Value;
            processedROI.CopyTo(result);
            result.ROI = System.Drawing.Rectangle.Empty;
            return result;
        }
        return Process(inputImage);
    }

    /// <summary>
    /// Runs the operator restricted to the polygon defined by
    /// <see cref="PolygonROIPoints"/>. Pixels outside the polygon keep their
    /// original values. Falls back to a plain <see cref="Process"/> call when
    /// fewer than 3 vertices are set.
    /// </summary>
    public Image<Gray, byte> ProcessWithPolygonROI(Image<Gray, byte> inputImage)
    {
        if (PolygonROIPoints == null || PolygonROIPoints.Length < 3)
        {
            return Process(inputImage);
        }
        // Build a binary mask: white (255) inside the polygon, black elsewhere.
        var mask = new Image<Gray, byte>(inputImage.Width, inputImage.Height);
        mask.SetValue(new Gray(0));
        using (var vop = new VectorOfPoint(PolygonROIPoints))
        {
            using (var vvop = new VectorOfVectorOfPoint(vop))
            {
                // Thickness -1 fills the polygon.
                CvInvoke.DrawContours(mask, vvop, 0, new MCvScalar(255), -1);
            }
        }
        // Process the whole image, then blend by mask.
        using var processedImage = Process(inputImage);
        var result = inputImage.Clone();
        // Inside the ROI use the processed pixel, outside keep the original.
        for (int y = 0; y < inputImage.Height; y++)
        {
            for (int x = 0; x < inputImage.Width; x++)
            {
                if (mask.Data[y, x, 0] > 0) // inside the ROI
                {
                    result.Data[y, x, 0] = processedImage.Data[y, x, 0];
                }
            }
        }
        // Publish ROI information for downstream consumers. The mask is handed
        // over via OutputData and therefore intentionally not disposed here.
        OutputData["ROIMask"] = mask;
        OutputData["PolygonPoints"] = PolygonROIPoints;
        OutputData["ROIOffset"] = System.Drawing.Point.Empty;
        return result;
    }

    /// <summary>
    /// Returns a snapshot list of all parameters.
    /// </summary>
    public List<ProcessorParameter> GetParameters()
    {
        return new List<ProcessorParameter>(Parameters.Values);
    }

    /// <summary>
    /// Sets a parameter value.
    /// </summary>
    /// <exception cref="ArgumentException">The parameter does not exist.</exception>
    public void SetParameter(string name, object value)
    {
        // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer.
        if (Parameters.TryGetValue(name, out var parameter))
        {
            parameter.Value = value;
        }
        else
        {
            throw new ArgumentException($"参数 {name} 不存在");
        }
    }

    /// <summary>
    /// Gets a parameter value converted to <typeparamref name="T"/>.
    /// </summary>
    /// <exception cref="ArgumentException">The parameter does not exist.</exception>
    public T GetParameter<T>(string name)
    {
        if (Parameters.TryGetValue(name, out var parameter))
        {
            return (T)Convert.ChangeType(parameter.Value, typeof(T))!;
        }
        throw new ArgumentException($"参数 {name} 不存在");
    }

    /// <summary>
    /// Gets a single parameter descriptor, or null when it does not exist.
    /// </summary>
    public ProcessorParameter? GetParameterInfo(string name)
    {
        return Parameters.TryGetValue(name, out var parameter) ? parameter : null;
    }
}
@@ -0,0 +1,60 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: ProcessorParameter.cs
// 描述: 图像处理算子参数定义类,用于描述算子的可配置参数
// 功能:
// - 定义参数的基本属性(名称、类型、默认值)
// - 支持参数范围约束(最小值、最大值)
// - 支持枚举类型参数(下拉选项)
// - 提供参数描述信息用于UI显示
// - 统一的参数管理机制
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
namespace ImageProcessing.Core;
/// <summary>
/// Describes one configurable parameter of an image-processing operator:
/// its identity, CLR type, current value, optional range constraint, an
/// optional option list for combo boxes, and UI metadata.
/// </summary>
public class ProcessorParameter
{
    /// <summary>Parameter name used in code.</summary>
    public string Name { get; set; }
    /// <summary>Human-readable name shown in the UI.</summary>
    public string DisplayName { get; set; }
    /// <summary>CLR type of the parameter value.</summary>
    public Type ValueType { get; set; }
    /// <summary>Current value.</summary>
    public object Value { get; set; }
    /// <summary>Optional lower bound.</summary>
    public object? MinValue { get; set; }
    /// <summary>Optional upper bound.</summary>
    public object? MaxValue { get; set; }
    /// <summary>Description shown in the UI.</summary>
    public string Description { get; set; }
    /// <summary>Allowed values for combo-box style parameters.</summary>
    public string[]? Options { get; set; }
    /// <summary>Whether the parameter is shown in the UI (visible by default).</summary>
    public bool IsVisible { get; set; } = true;

    public ProcessorParameter(string name, string displayName, Type valueType, object defaultValue,
        object? minValue = null, object? maxValue = null, string description = "", string[]? options = null)
    {
        (Name, DisplayName, ValueType, Value) = (name, displayName, valueType, defaultValue);
        (MinValue, MaxValue) = (minValue, maxValue);
        Description = description;
        Options = options;
    }
}
@@ -0,0 +1,42 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0-windows</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<EmbeddedResource Update="Resources\Resources.resx">
<Generator>PublicResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
</EmbeddedResource>
<EmbeddedResource Update="Resources\Resources.zh-CN.resx">
<DependentUpon>Resources.resx</DependentUpon>
</EmbeddedResource>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Emgu.CV" Version="4.10.0.5680" />
<PackageReference Include="Emgu.CV.runtime.windows" Version="4.10.0.5680" />
<PackageReference Include="Emgu.CV.Bitmap" Version="4.10.0.5680" />
<PackageReference Include="Microsoft.ML.OnnxRuntime.Gpu" Version="1.17.3" />
<PackageReference Include="Serilog" Version="4.3.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.1.1" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\ImageProcessing.Core\ImageProcessing.Core.csproj" />
</ItemGroup>
<ItemGroup>
<Compile Update="Resources\Resources.Designer.cs">
<DesignTime>True</DesignTime>
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
</Compile>
</ItemGroup>
</Project>
@@ -0,0 +1,50 @@
using System.Globalization;
using System.Resources;
namespace ImageProcessing.Processors;
/// <summary>
/// Localization helper for the Processors assembly: resolves UI strings from
/// the embedded resource file using <see cref="CultureInfo.CurrentUICulture"/>
/// so the language stays in sync with the main ImageProcessing application.
/// </summary>
public static class LocalizationHelper
{
    // Created on first use; construction is cheap and idempotent.
    private static ResourceManager? _resourceManager;

    /// <summary>
    /// Resource manager bound to "Resources.resx" embedded in this assembly.
    /// </summary>
    private static ResourceManager ResourceManager =>
        _resourceManager ??= new ResourceManager(
            "ImageProcessing.Processors.Resources.Resources",
            typeof(LocalizationHelper).Assembly);

    /// <summary>
    /// Looks up the localized string for <paramref name="key"/>.
    /// </summary>
    /// <param name="key">Resource key.</param>
    /// <returns>The localized string, or the key itself when no resource is found.</returns>
    public static string GetString(string key)
    {
        try
        {
            // CurrentUICulture is process-wide, so this automatically follows
            // the main project's language selection.
            return ResourceManager.GetString(key, CultureInfo.CurrentUICulture) ?? key;
        }
        catch
        {
            // Missing resource manifest (e.g. design time / tests) — fall back to the key.
            return key;
        }
    }
}
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,197 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: FilmEffectProcessor.cs
// 描述: 电子胶片效果算子,模拟传统X射线胶片的显示效果
// 功能:
// - 窗宽窗位(Window/Level)调整
// - 胶片反转(正片/负片)
// - 多种胶片特性曲线(线性、S曲线、对数、指数)
// - 边缘增强(模拟胶片锐化效果)
// - 使用查找表(LUT)加速处理
// 算法: 窗宽窗位映射 + 特性曲线变换
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// 电子胶片效果算子
/// </summary>
public class FilmEffectProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<FilmEffectProcessor>();
private byte[] _lut = new byte[256];
public FilmEffectProcessor()
{
    // Name/description are resolved through the localization helper so they
    // follow the application's current UI language.
    Name = LocalizationHelper.GetString("FilmEffectProcessor_Name");
    Description = LocalizationHelper.GetString("FilmEffectProcessor_Description");
}
protected override void InitializeParameters()
{
    // Window center (level): default 128, range 0-255.
    Parameters.Add("WindowCenter", new ProcessorParameter(
        "WindowCenter",
        LocalizationHelper.GetString("FilmEffectProcessor_WindowCenter"),
        typeof(int),
        128,
        0,
        255,
        LocalizationHelper.GetString("FilmEffectProcessor_WindowCenter_Desc")));
    // Window width: default 255 (full range), minimum 1.
    Parameters.Add("WindowWidth", new ProcessorParameter(
        "WindowWidth",
        LocalizationHelper.GetString("FilmEffectProcessor_WindowWidth"),
        typeof(int),
        255,
        1,
        255,
        LocalizationHelper.GetString("FilmEffectProcessor_WindowWidth_Desc")));
    // Invert flag (negative/positive film); unbounded -> rendered as a checkbox.
    Parameters.Add("Invert", new ProcessorParameter(
        "Invert",
        LocalizationHelper.GetString("FilmEffectProcessor_Invert"),
        typeof(bool),
        false,
        null,
        null,
        LocalizationHelper.GetString("FilmEffectProcessor_Invert_Desc")));
    // Film characteristic curve; option list -> rendered as a combo box.
    Parameters.Add("Curve", new ProcessorParameter(
        "Curve",
        LocalizationHelper.GetString("FilmEffectProcessor_Curve"),
        typeof(string),
        "Linear",
        null,
        null,
        LocalizationHelper.GetString("FilmEffectProcessor_Curve_Desc"),
        new string[] { "Linear", "Sigmoid", "Logarithmic", "Exponential" }));
    // Strength of the selected curve: default 1.0, range 0.1-5.0.
    Parameters.Add("CurveStrength", new ProcessorParameter(
        "CurveStrength",
        LocalizationHelper.GetString("FilmEffectProcessor_CurveStrength"),
        typeof(double),
        1.0,
        0.1,
        5.0,
        LocalizationHelper.GetString("FilmEffectProcessor_CurveStrength_Desc")));
    // Edge-enhancement amount: default 0.0 (off), range 0.0-3.0.
    Parameters.Add("EdgeEnhance", new ProcessorParameter(
        "EdgeEnhance",
        LocalizationHelper.GetString("FilmEffectProcessor_EdgeEnhance"),
        typeof(double),
        0.0,
        0.0,
        3.0,
        LocalizationHelper.GetString("FilmEffectProcessor_EdgeEnhance_Desc")));
    _logger.Debug("InitializeParameters");
}
/// <summary>
/// Applies the film effect: window/level mapping through a 256-entry LUT,
/// the selected characteristic curve, optional inversion, and optional edge
/// enhancement (unsharp masking against a Gaussian blur of the input).
/// </summary>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
    int windowCenter = GetParameter<int>("WindowCenter");
    int windowWidth = GetParameter<int>("WindowWidth");
    bool invert = GetParameter<bool>("Invert");
    string curve = GetParameter<string>("Curve");
    double curveStrength = GetParameter<double>("CurveStrength");
    double edgeEnhance = GetParameter<double>("EdgeEnhance");
    // Build the lookup table once, then map every pixel through it.
    BuildLUT(windowCenter, windowWidth, invert, curve, curveStrength);
    var result = inputImage.Clone();
    int width = result.Width;
    int height = result.Height;
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            result.Data[y, x, 0] = _lut[result.Data[y, x, 0]];
        }
    }
    // Edge enhancement: add back (input - blurred) scaled by edgeEnhance,
    // simulating the sharpening character of film.
    // (The original code also allocated an Image<Gray, float> "detail" buffer
    // here that was never used; it has been removed.)
    if (edgeEnhance > 0.01)
    {
        using var blurred = inputImage.SmoothGaussian(3);
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                float diff = inputImage.Data[y, x, 0] - blurred.Data[y, x, 0];
                float enhanced = result.Data[y, x, 0] + (float)(diff * edgeEnhance);
                result.Data[y, x, 0] = (byte)Math.Clamp((int)enhanced, 0, 255);
            }
        }
    }
    _logger.Debug("Process: WC={WC}, WW={WW}, Invert={Inv}, Curve={Curve}, Strength={Str}, Edge={Edge}",
        windowCenter, windowWidth, invert, curve, curveStrength, edgeEnhance);
    return result;
}
/// <summary>
/// Fills the 256-entry lookup table: window/level mapping to [0,1],
/// optional characteristic curve, optional inversion, then scale to [0,255].
/// </summary>
/// <param name="wc">Window center (level).</param>
/// <param name="ww">Window width; width &lt;= 1 degenerates to a hard step at wc.</param>
/// <param name="invert">Invert the mapped output (negative look).</param>
/// <param name="curve">Curve name: "Sigmoid", "Logarithmic", "Exponential" or linear otherwise.</param>
/// <param name="strength">Curve strength passed to the curve functions.</param>
private void BuildLUT(int wc, int ww, bool invert, string curve, double strength)
{
    double halfWidth = ww / 2.0;
    double lower = wc - halfWidth;
    double upper = wc + halfWidth;
    double span = upper - lower;
    for (int level = 0; level < 256; level++)
    {
        // Window mapping: clamp the gray level into [0, 1] over the window span.
        double t = ww <= 1
            ? (level >= wc ? 1.0 : 0.0)
            : Math.Clamp((level - lower) / span, 0.0, 1.0);
        // Characteristic curve shaping.
        double shaped;
        switch (curve)
        {
            case "Sigmoid":
                shaped = ApplySigmoid(t, strength);
                break;
            case "Logarithmic":
                shaped = ApplyLogarithmic(t, strength);
                break;
            case "Exponential":
                shaped = ApplyExponential(t, strength);
                break;
            default: // Linear
                shaped = t;
                break;
        }
        // Negative (film) inversion.
        if (invert)
        {
            shaped = 1.0 - shaped;
        }
        _lut[level] = (byte)Math.Clamp((int)(shaped * 255.0), 0, 255);
    }
}
/// <summary>Sigmoid (S-shaped) curve: boosts mid-tone contrast around x = 0.5.</summary>
private static double ApplySigmoid(double x, double strength)
{
    double steepness = 10.0 * strength;
    double centered = x - 0.5;
    return 1.0 / (1.0 + Math.Exp(-steepness * centered));
}
/// <summary>Logarithmic curve: lifts shadows and compresses highlights.
/// Normalized so that x = 0 maps to 0 and x = 1 maps to 1.</summary>
private static double ApplyLogarithmic(double x, double strength)
{
    // log(1 + c*x) / log(1 + c) with c = strength.
    return Math.Log(1.0 + strength * x) / Math.Log(1.0 + strength);
}
/// <summary>Exponential curve: compresses shadows and boosts highlights.
/// Normalized so that x = 0 maps to 0 and x = 1 maps to 1.</summary>
private static double ApplyExponential(double x, double strength)
{
    // (e^(c*x) - 1) / (e^c - 1) with c = strength.
    double denominator = Math.Exp(strength) - 1.0;
    return (Math.Exp(strength * x) - 1.0) / denominator;
}
}
@@ -0,0 +1,149 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: PseudoColorProcessor.cs
// 描述: 伪色彩渲染算子,将灰度图像映射为彩色图像
// 功能:
// - 支持多种 OpenCV 内置色彩映射表(Jet、Hot、Cool、Rainbow 等)
// - 可选灰度范围裁剪,突出感兴趣的灰度区间
// - 可选反转色彩映射方向
// 算法: 查找表(LUT)色彩映射
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Pseudo-color rendering operator: maps a grayscale image to a color image
/// using an OpenCV color map. The colored result is published through
/// <c>OutputData["PseudoColorImage"]</c>; the pipeline return value stays grayscale.
/// </summary>
public class PseudoColorProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<PseudoColorProcessor>();

    // Sets the localized display name and description.
    public PseudoColorProcessor()
    {
        Name = LocalizationHelper.GetString("PseudoColorProcessor_Name");
        Description = LocalizationHelper.GetString("PseudoColorProcessor_Description");
    }

    /// <summary>
    /// Registers parameters: color map name, gray-range clipping [MinValue, MaxValue],
    /// and map-direction inversion.
    /// </summary>
    protected override void InitializeParameters()
    {
        // OpenCV color map to apply; default "Jet".
        Parameters.Add("ColorMap", new ProcessorParameter(
            "ColorMap",
            LocalizationHelper.GetString("PseudoColorProcessor_ColorMap"),
            typeof(string),
            "Jet",
            null,
            null,
            LocalizationHelper.GetString("PseudoColorProcessor_ColorMap_Desc"),
            new string[] { "Jet", "Hot", "Cool", "Rainbow", "HSV", "Turbo", "Inferno", "Magma", "Plasma", "Bone", "Ocean", "Spring", "Summer", "Autumn", "Winter" }));
        // Lower bound of the gray range of interest (mapped to 0 before coloring).
        Parameters.Add("MinValue", new ProcessorParameter(
            "MinValue",
            LocalizationHelper.GetString("PseudoColorProcessor_MinValue"),
            typeof(int),
            0,
            0,
            255,
            LocalizationHelper.GetString("PseudoColorProcessor_MinValue_Desc")));
        // Upper bound of the gray range of interest (mapped to 255 before coloring).
        Parameters.Add("MaxValue", new ProcessorParameter(
            "MaxValue",
            LocalizationHelper.GetString("PseudoColorProcessor_MaxValue"),
            typeof(int),
            255,
            0,
            255,
            LocalizationHelper.GetString("PseudoColorProcessor_MaxValue_Desc")));
        // Invert the gray values before mapping, reversing the color map direction.
        Parameters.Add("InvertMap", new ProcessorParameter(
            "InvertMap",
            LocalizationHelper.GetString("PseudoColorProcessor_InvertMap"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("PseudoColorProcessor_InvertMap_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Clips/normalizes the gray range, optionally inverts it, applies the selected
    /// color map, and stores the BGR result in <c>OutputData["PseudoColorImage"]</c>.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image (not modified).</param>
    /// <returns>A clone of the input (color output travels via OutputData).</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string colorMapName = GetParameter<string>("ColorMap");
        int minValue = GetParameter<int>("MinValue");
        int maxValue = GetParameter<int>("MaxValue");
        bool invertMap = GetParameter<bool>("InvertMap");
        OutputData.Clear();
        // Both the clipping and the pass-through path start from a copy of the input,
        // so clone once up front. try/finally guarantees the working copy is disposed
        // even if a CV call throws (the original code leaked it on exception).
        var normalized = inputImage.Clone();
        try
        {
            if (minValue > 0 || maxValue < 255)
            {
                // Map [minValue, maxValue] onto [0, 255]; values outside are clamped.
                double scale = 255.0 / Math.Max(maxValue - minValue, 1);
                for (int y = 0; y < normalized.Height; y++)
                {
                    for (int x = 0; x < normalized.Width; x++)
                    {
                        int val = normalized.Data[y, x, 0];
                        val = Math.Clamp(val, minValue, maxValue);
                        normalized.Data[y, x, 0] = (byte)((val - minValue) * scale);
                    }
                }
            }
            // Invert gray values to reverse the color map direction.
            if (invertMap)
            {
                CvInvoke.BitwiseNot(normalized, normalized);
            }
            // Resolve the color map; unknown names fall back to Jet.
            ColorMapType cmType = colorMapName switch
            {
                "Hot" => ColorMapType.Hot,
                "Cool" => ColorMapType.Cool,
                "Rainbow" => ColorMapType.Rainbow,
                "HSV" => ColorMapType.Hsv,
                "Turbo" => ColorMapType.Turbo,
                "Inferno" => ColorMapType.Inferno,
                "Magma" => ColorMapType.Magma,
                "Plasma" => ColorMapType.Plasma,
                "Bone" => ColorMapType.Bone,
                "Ocean" => ColorMapType.Ocean,
                "Spring" => ColorMapType.Spring,
                "Summer" => ColorMapType.Summer,
                "Autumn" => ColorMapType.Autumn,
                "Winter" => ColorMapType.Winter,
                _ => ColorMapType.Jet
            };
            using var colorMat = new Mat();
            CvInvoke.ApplyColorMap(normalized.Mat, colorMat, cmType);
            var colorImage = colorMat.ToImage<Bgr, byte>();
            // Publish the colored image for the UI.
            OutputData["PseudoColorImage"] = colorImage;
            _logger.Debug("Process: ColorMap={ColorMap}, MinValue={Min}, MaxValue={Max}, InvertMap={Invert}",
                colorMapName, minValue, maxValue, invertMap);
        }
        finally
        {
            normalized.Dispose();
        }
        // Return the original gray image unchanged (color goes through OutputData).
        return inputImage.Clone();
    }
}
@@ -0,0 +1,80 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: GrayscaleProcessor.cs
// 描述: 灰度图转换算子,用于将彩色图像转换为灰度图像
// 功能:
// - 标准灰度转换(加权平均)
// - 平均值法
// - 最大值法
// - 最小值法
// 算法: 加权平均法 Gray = 0.299*R + 0.587*G + 0.114*B
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Grayscale conversion operator. The pipeline already delivers single-channel
/// images, so "Max"/"Min" are approximated by brightness scaling and
/// "Weighted"/"Average" leave the image unchanged.
/// </summary>
public class GrayscaleProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<GrayscaleProcessor>();

    // Sets the localized display name and description.
    public GrayscaleProcessor()
    {
        Name = LocalizationHelper.GetString("GrayscaleProcessor_Name");
        Description = LocalizationHelper.GetString("GrayscaleProcessor_Description");
    }

    /// <summary>Registers the conversion method parameter.</summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("GrayscaleProcessor_Method"),
            typeof(string),
            "Weighted",
            null,
            null,
            LocalizationHelper.GetString("GrayscaleProcessor_Method_Desc"),
            new string[] { "Weighted", "Average", "Max", "Min" }));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Produces the method-dependent result.
    /// Fix: the original cloned the input and then reassigned <c>result</c> from
    /// <c>result * 1.2</c> / <c>result * 0.8</c>, leaking the initial clone;
    /// selecting the result per branch creates exactly one image.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image (not modified).</param>
    /// <returns>A new image; pixel values match the original implementation.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        Image<Gray, byte> result = method switch
        {
            // "Max": brighten (same scaling the original applied to its clone).
            "Max" => inputImage * 1.2,
            // "Min": darken.
            "Min" => inputImage * 0.8,
            // "Weighted" / "Average" / anything else: already gray, keep as-is.
            _ => inputImage.Clone()
        };
        _logger.Debug("Process: Method = {Method}", method);
        return result;
    }
}
@@ -0,0 +1,67 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: MirrorProcessor.cs
// 描述: 镜像算子,用于图像翻转
// 功能:
// - 水平镜像(左右翻转)
// - 垂直镜像(上下翻转)
// - 对角镜像(水平+垂直翻转,等效180°旋转)
// 算法: 像素坐标映射
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Mirror operator: flips the image horizontally, vertically, or both
/// (both flips together are equivalent to a 180° rotation).
/// </summary>
public class MirrorProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MirrorProcessor>();

    // Sets the localized display name and description.
    public MirrorProcessor()
    {
        Name = LocalizationHelper.GetString("MirrorProcessor_Name");
        Description = LocalizationHelper.GetString("MirrorProcessor_Description");
    }

    /// <summary>Registers the flip-direction parameter.</summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Direction", new ProcessorParameter(
            "Direction",
            LocalizationHelper.GetString("MirrorProcessor_Direction"),
            typeof(string),
            "Horizontal",
            null,
            null,
            LocalizationHelper.GetString("MirrorProcessor_Direction_Desc"),
            new string[] { "Horizontal", "Vertical", "Both" }));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Flips the input according to the Direction parameter.
    /// Unknown direction names fall back to a horizontal flip.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image (not modified).</param>
    /// <returns>A new, flipped image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string direction = GetParameter<string>("Direction");
        FlipType flipMode;
        if (direction == "Vertical")
        {
            flipMode = FlipType.Vertical;
        }
        else if (direction == "Both")
        {
            flipMode = FlipType.Both;
        }
        else
        {
            flipMode = FlipType.Horizontal;
        }
        var mirrored = inputImage.Clone();
        CvInvoke.Flip(inputImage, mirrored, flipMode);
        _logger.Debug("Process: Direction = {Direction}", direction);
        return mirrored;
    }
}
@@ -0,0 +1,140 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: RotateProcessor.cs
// 描述: 图像旋转算子
// 功能:
// - 任意角度旋转
// - 支持保持原始尺寸或自适应扩展画布
// - 可选背景填充值
// - 支持双线性插值
// 算法: 仿射变换旋转
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Image rotation operator: arbitrary-angle rotation by affine transform,
/// optionally expanding the canvas so the whole rotated image fits.
/// </summary>
public class RotateProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<RotateProcessor>();

    // Sets the localized display name and description.
    public RotateProcessor()
    {
        Name = LocalizationHelper.GetString("RotateProcessor_Name");
        Description = LocalizationHelper.GetString("RotateProcessor_Description");
    }

    /// <summary>
    /// Registers parameters: Angle (degrees), ExpandCanvas, BackgroundValue, Interpolation.
    /// </summary>
    protected override void InitializeParameters()
    {
        // Rotation angle in degrees; positive is counter-clockwise (OpenCV convention).
        Parameters.Add("Angle", new ProcessorParameter(
            "Angle",
            LocalizationHelper.GetString("RotateProcessor_Angle"),
            typeof(double),
            90.0,
            -360.0,
            360.0,
            LocalizationHelper.GetString("RotateProcessor_Angle_Desc")));
        // When true the output canvas grows to contain the whole rotated image.
        Parameters.Add("ExpandCanvas", new ProcessorParameter(
            "ExpandCanvas",
            LocalizationHelper.GetString("RotateProcessor_ExpandCanvas"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("RotateProcessor_ExpandCanvas_Desc")));
        // Gray value used to fill regions outside the rotated source.
        Parameters.Add("BackgroundValue", new ProcessorParameter(
            "BackgroundValue",
            LocalizationHelper.GetString("RotateProcessor_BackgroundValue"),
            typeof(int),
            0,
            0,
            255,
            LocalizationHelper.GetString("RotateProcessor_BackgroundValue_Desc")));
        // Resampling method used by WarpAffine.
        Parameters.Add("Interpolation", new ProcessorParameter(
            "Interpolation",
            LocalizationHelper.GetString("RotateProcessor_Interpolation"),
            typeof(string),
            "Bilinear",
            null,
            null,
            LocalizationHelper.GetString("RotateProcessor_Interpolation_Desc"),
            new string[] { "Nearest", "Bilinear", "Bicubic" }));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Rotates the input about its center. With ExpandCanvas the output is sized
    /// to the rotated bounding box and the transform is re-centered; otherwise
    /// the original size is kept and corners may be clipped.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image (not modified).</param>
    /// <returns>A new rotated image; background filled with BackgroundValue.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double angle = GetParameter<double>("Angle");
        bool expandCanvas = GetParameter<bool>("ExpandCanvas");
        int bgValue = GetParameter<int>("BackgroundValue");
        string interpolation = GetParameter<string>("Interpolation");
        // Unknown names fall back to bilinear.
        Inter interMethod = interpolation switch
        {
            "Nearest" => Inter.Nearest,
            "Bicubic" => Inter.Cubic,
            _ => Inter.Linear
        };
        int srcW = inputImage.Width;
        int srcH = inputImage.Height;
        PointF center = new PointF(srcW / 2.0f, srcH / 2.0f);
        // Build the 2x3 affine rotation matrix about the image center.
        using var rotMat = new Mat();
        CvInvoke.GetRotationMatrix2D(center, angle, 1.0, rotMat);
        int dstW, dstH;
        if (expandCanvas)
        {
            // Bounding canvas that contains the whole rotated image.
            double rad = Math.Abs(angle * Math.PI / 180.0);
            double sinA = Math.Abs(Math.Sin(rad));
            double cosA = Math.Abs(Math.Cos(rad));
            dstW = (int)Math.Ceiling(srcW * cosA + srcH * sinA);
            dstH = (int)Math.Ceiling(srcW * sinA + srcH * cosA);
            // Adjust the translation terms so the rotated image is centered.
            double[] m = new double[6];
            // NOTE(review): assumes this Mat.CopyTo overload copies the 2x3 CV_64F
            // matrix row-major into the managed array — confirm against the Emgu API.
            rotMat.CopyTo(m);
            m[2] += (dstW - srcW) / 2.0;
            m[5] += (dstH - srcH) / 2.0;
            // Write the adjusted coefficients back into a fresh 2x3 double Mat.
            using var adjusted = new Mat(2, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            System.Runtime.InteropServices.Marshal.Copy(m, 0, adjusted.DataPointer, 6);
            var result = new Image<Gray, byte>(dstW, dstH, new Gray(bgValue));
            CvInvoke.WarpAffine(inputImage, result, adjusted, new Size(dstW, dstH),
                interMethod, Warp.Default, BorderType.Constant, new MCvScalar(bgValue));
            _logger.Debug("Process: Angle={Angle}, ExpandCanvas=true, Size={W}x{H}", angle, dstW, dstH);
            return result;
        }
        else
        {
            // Keep the original size; clipped regions are filled with the background value.
            dstW = srcW;
            dstH = srcH;
            var result = new Image<Gray, byte>(dstW, dstH, new Gray(bgValue));
            CvInvoke.WarpAffine(inputImage, result, rotMat, new Size(dstW, dstH),
                interMethod, Warp.Default, BorderType.Constant, new MCvScalar(bgValue));
            _logger.Debug("Process: Angle={Angle}, ExpandCanvas=false", angle);
            return result;
        }
    }
}
@@ -0,0 +1,106 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: ThresholdProcessor.cs
// 描述: 阈值分割算子,用于图像二值化处理
// 功能:
// - 固定阈值二值化
// - Otsu自动阈值计算
// - 可调节阈值和最大值
// - 将灰度图像转换为二值图像
// 算法: 阈值分割、Otsu算法
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Threshold segmentation operator: binarizes a grayscale image either with a
/// band threshold [MinThreshold, MaxThreshold] or with Otsu's automatic method.
/// </summary>
public class ThresholdProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ThresholdProcessor>();

    // Sets the localized display name and description.
    public ThresholdProcessor()
    {
        Name = LocalizationHelper.GetString("ThresholdProcessor_Name");
        Description = LocalizationHelper.GetString("ThresholdProcessor_Description");
    }

    /// <summary>Registers parameters: band bounds and the Otsu switch.</summary>
    protected override void InitializeParameters()
    {
        // Lower bound of the foreground band (inclusive).
        Parameters.Add("MinThreshold", new ProcessorParameter(
            "MinThreshold",
            LocalizationHelper.GetString("ThresholdProcessor_MinThreshold"),
            typeof(int),
            64,
            0,
            255,
            LocalizationHelper.GetString("ThresholdProcessor_MinThreshold_Desc")));
        // Upper bound of the foreground band (inclusive).
        Parameters.Add("MaxThreshold", new ProcessorParameter(
            "MaxThreshold",
            LocalizationHelper.GetString("ThresholdProcessor_MaxThreshold"),
            typeof(int),
            192,
            0,
            255,
            LocalizationHelper.GetString("ThresholdProcessor_MaxThreshold_Desc")));
        // When true, Otsu's method picks the threshold and the band bounds are ignored.
        Parameters.Add("UseOtsu", new ProcessorParameter(
            "UseOtsu",
            LocalizationHelper.GetString("ThresholdProcessor_UseOtsu"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("ThresholdProcessor_UseOtsu_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Binarizes the input. Otsu mode: OpenCV computes the global threshold
    /// (the MinThreshold argument is ignored by OpenCV in that case). Band mode:
    /// pixels inside [MinThreshold, MaxThreshold] become 255, all others 0.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image (not modified).</param>
    /// <returns>A new binary image (values 0 or 255).</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int minThreshold = GetParameter<int>("MinThreshold");
        int maxThreshold = GetParameter<int>("MaxThreshold");
        bool useOtsu = GetParameter<bool>("UseOtsu");
        var result = new Image<Gray, byte>(inputImage.Size);
        if (useOtsu)
        {
            // Fix (clarity): Otsu is a modifier flag that must be combined with a base
            // thresholding type. The old code passed ThresholdType.Otsu alone, which
            // only worked because Binary happens to be 0; the combination below is
            // numerically identical but explicit.
            CvInvoke.Threshold(inputImage, result, minThreshold, 255, ThresholdType.Binary | ThresholdType.Otsu);
            _logger.Debug("Process: UseOtsu = true");
        }
        else
        {
            // Band thresholding: foreground (255) inside [min, max], background (0) outside.
            byte[,,] inputData = inputImage.Data;
            byte[,,] outputData = result.Data;
            int height = inputImage.Height;
            int width = inputImage.Width;
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixelValue = inputData[y, x, 0];
                    outputData[y, x, 0] = (pixelValue >= minThreshold && pixelValue <= maxThreshold)
                        ? (byte)255
                        : (byte)0;
                }
            }
            _logger.Debug("Process: MinThreshold = {MinThreshold}, MaxThreshold = {MaxThreshold}",
                minThreshold, maxThreshold);
        }
        return result;
    }
}
@@ -0,0 +1,257 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: ColorLayerProcessor.cs
// 描述: 色彩分层算子,将灰度图像按亮度区间分层
// 功能:
// - 将灰度图像按指定层数均匀分层
// - 支持自定义分层数(2~16层)
// - 支持均匀分层和基于 Otsu 的自适应分层
// - 可选保留原始灰度或映射为等间距灰度
// 算法: 灰度量化 / 多阈值分割
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Gray-level slicing operator: quantizes a grayscale image into a small number
/// of intensity bands, using either uniform thresholds or recursive Otsu splits,
/// and can either output all bands or isolate a single band as a binary mask.
/// </summary>
public class ColorLayerProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ColorLayerProcessor>();

    // Sets the localized display name and description.
    public ColorLayerProcessor()
    {
        Name = LocalizationHelper.GetString("ColorLayerProcessor_Name");
        Description = LocalizationHelper.GetString("ColorLayerProcessor_Description");
    }

    /// <summary>
    /// Registers parameters: band count, threshold method, output mode, target band.
    /// </summary>
    protected override void InitializeParameters()
    {
        // Number of intensity bands (2..16); default 4.
        Parameters.Add("Layers", new ProcessorParameter(
            "Layers",
            LocalizationHelper.GetString("ColorLayerProcessor_Layers"),
            typeof(int),
            4,
            2,
            16,
            LocalizationHelper.GetString("ColorLayerProcessor_Layers_Desc")));
        // Threshold placement: uniform split of [0,255] or recursive Otsu.
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("ColorLayerProcessor_Method"),
            typeof(string),
            "Uniform",
            null,
            null,
            LocalizationHelper.GetString("ColorLayerProcessor_Method_Desc"),
            new string[] { "Uniform", "Otsu" }));
        // Band output value: equally spaced gray levels or the band's mid value.
        Parameters.Add("OutputMode", new ProcessorParameter(
            "OutputMode",
            LocalizationHelper.GetString("ColorLayerProcessor_OutputMode"),
            typeof(string),
            "EqualSpaced",
            null,
            null,
            LocalizationHelper.GetString("ColorLayerProcessor_OutputMode_Desc"),
            new string[] { "EqualSpaced", "MidValue" }));
        // 0 = output all bands; 1..Layers = binary mask of that single band.
        Parameters.Add("TargetLayer", new ProcessorParameter(
            "TargetLayer",
            LocalizationHelper.GetString("ColorLayerProcessor_TargetLayer"),
            typeof(int),
            0,
            0,
            16,
            LocalizationHelper.GetString("ColorLayerProcessor_TargetLayer_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Quantizes the input into bands and maps each band to its output gray value,
    /// or — when TargetLayer &gt; 0 — emits a binary mask of the selected band.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image (not modified).</param>
    /// <returns>A new quantized (or binary mask) image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int layers = GetParameter<int>("Layers");
        string method = GetParameter<string>("Method");
        string outputMode = GetParameter<string>("OutputMode");
        int targetLayer = GetParameter<int>("TargetLayer");
        // An out-of-range target layer falls back to "all bands".
        if (targetLayer < 0 || targetLayer > layers)
            targetLayer = 0;
        _logger.Debug("Process: Layers={Layers}, Method={Method}, OutputMode={OutputMode}, TargetLayer={TargetLayer}",
            layers, method, outputMode, targetLayer);
        // Compute band boundaries (ascending). Note: the Otsu path may legitimately
        // return fewer than layers-1 thresholds on degenerate histograms.
        byte[] thresholds = method == "Otsu"
            ? ComputeOtsuMultiThresholds(inputImage, layers)
            : ComputeUniformThresholds(layers);
        // Output gray value for every band index GetLayerIndex can produce.
        byte[] layerValues = ComputeLayerValues(thresholds, layers, outputMode);
        int width = inputImage.Width;
        int height = inputImage.Height;
        var result = new Image<Gray, byte>(width, height);
        var srcData = inputImage.Data;
        var dstData = result.Data;
        if (targetLayer == 0)
        {
            // All bands: map each pixel to its band's output value (row-parallel).
            Parallel.For(0, height, y =>
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixel = srcData[y, x, 0];
                    int layerIdx = GetLayerIndex(pixel, thresholds);
                    dstData[y, x, 0] = layerValues[layerIdx];
                }
            });
        }
        else
        {
            // Single band: selected band -> 255 (white), everything else -> 0 (black).
            int target = targetLayer - 1; // parameter is 1-based, internal index 0-based
            Parallel.For(0, height, y =>
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixel = srcData[y, x, 0];
                    int layerIdx = GetLayerIndex(pixel, thresholds);
                    dstData[y, x, 0] = (layerIdx == target) ? (byte)255 : (byte)0;
                }
            });
        }
        _logger.Debug("Process completed: {Layers} layers, target={TargetLayer}", layers, targetLayer);
        return result;
    }

    /// <summary>
    /// Uniform band boundaries: splits [0, 255] into <paramref name="layers"/> equal parts.
    /// </summary>
    private static byte[] ComputeUniformThresholds(int layers)
    {
        var thresholds = new byte[layers - 1];
        double step = 256.0 / layers;
        for (int i = 0; i < layers - 1; i++)
            thresholds[i] = (byte)Math.Clamp((int)((i + 1) * step), 0, 255);
        return thresholds;
    }

    /// <summary>
    /// Otsu-based multi-threshold computation: builds the histogram once, then
    /// recursively bisects the intensity range. May return fewer than layers-1
    /// thresholds when a sub-range contains no pixels.
    /// </summary>
    private static byte[] ComputeOtsuMultiThresholds(Image<Gray, byte> image, int layers)
    {
        // Histogram of the full image.
        int[] histogram = new int[256];
        var data = image.Data;
        int h = image.Height, w = image.Width;
        for (int y = 0; y < h; y++)
            for (int x = 0; x < w; x++)
                histogram[data[y, x, 0]]++;
        // Recursive Otsu splitting.
        var thresholds = new List<byte>();
        RecursiveOtsu(histogram, 0, 255, layers, thresholds);
        thresholds.Sort();
        return thresholds.ToArray();
    }

    /// <summary>
    /// Recursive Otsu: finds the between-class-variance-maximizing threshold in
    /// [low, high], then recursively splits both halves until the requested number
    /// of layers is reached. Empty ranges terminate early without adding a threshold.
    /// </summary>
    private static void RecursiveOtsu(int[] histogram, int low, int high, int layers, List<byte> thresholds)
    {
        if (layers <= 1 || low >= high)
            return;
        // Totals over [low, high].
        long totalPixels = 0;
        long totalSum = 0;
        for (int i = low; i <= high; i++)
        {
            totalPixels += histogram[i];
            totalSum += (long)i * histogram[i];
        }
        if (totalPixels == 0) return;
        long bgPixels = 0, bgSum = 0;
        double maxVariance = 0;
        int bestThreshold = (low + high) / 2;
        for (int t = low; t < high; t++)
        {
            bgPixels += histogram[t];
            bgSum += (long)t * histogram[t];
            long fgPixels = totalPixels - bgPixels;
            if (bgPixels == 0 || fgPixels == 0) continue;
            double bgMean = (double)bgSum / bgPixels;
            double fgMean = (double)(totalSum - bgSum) / fgPixels;
            // Between-class variance (scaled by the constant total² — ordering is unaffected).
            double variance = (double)bgPixels * fgPixels * (bgMean - fgMean) * (bgMean - fgMean);
            if (variance > maxVariance)
            {
                maxVariance = variance;
                bestThreshold = t;
            }
        }
        thresholds.Add((byte)bestThreshold);
        // Split the remaining layer budget between the two halves.
        int leftLayers = layers / 2;
        int rightLayers = layers - leftLayers;
        RecursiveOtsu(histogram, low, bestThreshold, leftLayers, thresholds);
        RecursiveOtsu(histogram, bestThreshold + 1, high, rightLayers, thresholds);
    }

    /// <summary>
    /// Output gray value per band.
    /// Fix: the old MidValue branch indexed <c>thresholds[i]</c> for i up to
    /// layers-2, which threw IndexOutOfRangeException whenever the Otsu path
    /// produced fewer than layers-1 thresholds; all indexing is now bounded by
    /// the actual threshold count. Results are unchanged for the normal case
    /// (thresholds.Length == layers - 1).
    /// </summary>
    private static byte[] ComputeLayerValues(byte[] thresholds, int layers, string outputMode)
    {
        // GetLayerIndex returns indices in [0, thresholds.Length]; size the table
        // to cover both that range and the requested layer count.
        var values = new byte[Math.Max(layers, thresholds.Length + 1)];
        if (outputMode == "EqualSpaced")
        {
            // Equally spaced outputs: 0, 255/(n-1), ..., 255 (clamped).
            int denominator = Math.Max(layers - 1, 1);
            for (int i = 0; i < values.Length; i++)
                values[i] = (byte)Math.Clamp((int)(255.0 * i / denominator), 0, 255);
        }
        else // MidValue
        {
            if (thresholds.Length == 0)
            {
                // Degenerate: a single band, represented by mid gray.
                for (int i = 0; i < values.Length; i++)
                    values[i] = 128;
            }
            else
            {
                // Highest band index that can actually occur.
                int top = Math.Min(layers, thresholds.Length + 1) - 1;
                values[0] = (byte)(thresholds[0] / 2);
                // Interior band i lies between thresholds[i-1] and thresholds[i].
                for (int i = 1; i < top && i < thresholds.Length; i++)
                    values[i] = (byte)((thresholds[i - 1] + thresholds[i]) / 2);
                values[top] = (byte)((thresholds[^1] + 255) / 2);
            }
        }
        return values;
    }

    /// <summary>
    /// Band index of a pixel: the first threshold the pixel is below, or the last
    /// band when it is &gt;= every threshold (thresholds must be ascending).
    /// </summary>
    private static int GetLayerIndex(byte pixel, byte[] thresholds)
    {
        for (int i = 0; i < thresholds.Length; i++)
        {
            if (pixel < thresholds[i])
                return i;
        }
        return thresholds.Length;
    }
}
@@ -0,0 +1,172 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: ContrastProcessor.cs
// 描述: 对比度调整算子,用于增强图像对比度
// 功能:
// - 线性对比度和亮度调整
// - 自动对比度拉伸
// - CLAHE(对比度受限自适应直方图均衡化)
// - 支持多种对比度增强方法
// 算法: 线性变换、直方图均衡化、CLAHE
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Contrast adjustment operator: linear contrast/brightness, automatic
/// contrast stretching, or tile-wise histogram equalization.
/// Mode priority: UseCLAHE &gt; AutoContrast &gt; linear.
/// </summary>
public class ContrastProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ContrastProcessor>();

    // Sets the localized display name and description.
    public ContrastProcessor()
    {
        Name = LocalizationHelper.GetString("ContrastProcessor_Name");
        Description = LocalizationHelper.GetString("ContrastProcessor_Description");
    }

    /// <summary>Registers parameters for all three adjustment modes.</summary>
    protected override void InitializeParameters()
    {
        // Linear gain [0.1, 3.0]; default 1.0 (no change).
        Parameters.Add("Contrast", new ProcessorParameter(
            "Contrast",
            LocalizationHelper.GetString("ContrastProcessor_Contrast"),
            typeof(double),
            1.0,
            0.1,
            3.0,
            LocalizationHelper.GetString("ContrastProcessor_Contrast_Desc")));
        // Additive brightness offset [-100, 100]; default 0.
        Parameters.Add("Brightness", new ProcessorParameter(
            "Brightness",
            LocalizationHelper.GetString("ContrastProcessor_Brightness"),
            typeof(int),
            0,
            -100,
            100,
            LocalizationHelper.GetString("ContrastProcessor_Brightness_Desc")));
        // Min/max stretch to the full [0, 255] range.
        Parameters.Add("AutoContrast", new ProcessorParameter(
            "AutoContrast",
            LocalizationHelper.GetString("ContrastProcessor_AutoContrast"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("ContrastProcessor_AutoContrast_Desc")));
        // Tile-wise histogram equalization (takes precedence over the other modes).
        Parameters.Add("UseCLAHE", new ProcessorParameter(
            "UseCLAHE",
            LocalizationHelper.GetString("ContrastProcessor_UseCLAHE"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("ContrastProcessor_UseCLAHE_Desc")));
        // Clip limit for CLAHE (see the note on ApplyCLAHE: currently unused).
        Parameters.Add("ClipLimit", new ProcessorParameter(
            "ClipLimit",
            LocalizationHelper.GetString("ContrastProcessor_ClipLimit"),
            typeof(double),
            2.0,
            1.0,
            10.0,
            LocalizationHelper.GetString("ContrastProcessor_ClipLimit_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the selected adjustment.
    /// Fix: the original cloned the input up front and then reassigned the clone
    /// in every branch, leaking it; the result is now created once per branch.
    /// The linear branch also disposes its intermediate image.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image (not modified).</param>
    /// <returns>A new adjusted image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double contrast = GetParameter<double>("Contrast");
        int brightness = GetParameter<int>("Brightness");
        bool autoContrast = GetParameter<bool>("AutoContrast");
        bool useCLAHE = GetParameter<bool>("UseCLAHE");
        double clipLimit = GetParameter<double>("ClipLimit");
        Image<Gray, byte> result;
        if (useCLAHE)
        {
            result = ApplyCLAHE(inputImage, clipLimit);
        }
        else if (autoContrast)
        {
            result = AutoContrastStretch(inputImage);
        }
        else
        {
            // Linear transform: output = input * contrast + brightness (saturating).
            using var scaled = inputImage * contrast;
            result = scaled + brightness;
        }
        _logger.Debug("Process: Contrast = {contrast},Brightness = {brightness}," +
            "AutoContrast = {autoContrast},UseCLAHE = {useCLAHE}, ClipLimit = {clipLimit}", contrast, brightness, autoContrast, useCLAHE, clipLimit);
        return result;
    }

    /// <summary>
    /// Stretches the input's [min, max] intensity range to the full [0, 255].
    /// Returns a plain clone when the image already spans the full range.
    /// Fix: intermediate float images are now disposed instead of leaked.
    /// </summary>
    private Image<Gray, byte> AutoContrastStretch(Image<Gray, byte> inputImage)
    {
        double minVal = 0, maxVal = 0;
        Point minLoc = new Point();
        Point maxLoc = new Point();
        CvInvoke.MinMaxLoc(inputImage, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
        if (minVal == 0 && maxVal == 255)
        {
            // Already full range — nothing to stretch.
            return inputImage.Clone();
        }
        _logger.Debug("AutoContrastStretch");
        using var floatImage = inputImage.Convert<Gray, float>();
        if (maxVal > minVal)
        {
            using var shifted = floatImage - minVal;
            using var stretched = shifted * (255.0 / (maxVal - minVal));
            return stretched.Convert<Gray, byte>();
        }
        // Constant image: stretching is undefined, return the values unchanged.
        return floatImage.Convert<Gray, byte>();
    }

    /// <summary>
    /// Tile-wise histogram equalization over 8x8 tiles.
    /// NOTE(review): <paramref name="clipLimit"/> is currently unused and no
    /// histogram clipping or tile interpolation is performed, so this is plain
    /// adaptive histogram equalization (AHE) rather than true CLAHE — confirm
    /// whether CvInvoke.CreateCLAHE was intended. Behavior kept unchanged here.
    /// </summary>
    private Image<Gray, byte> ApplyCLAHE(Image<Gray, byte> inputImage, double clipLimit)
    {
        int tileSize = 8;
        int width = inputImage.Width;
        int height = inputImage.Height;
        int tilesX = (width + tileSize - 1) / tileSize;
        int tilesY = (height + tileSize - 1) / tileSize;
        var result = new Image<Gray, byte>(width, height);
        for (int ty = 0; ty < tilesY; ty++)
        {
            for (int tx = 0; tx < tilesX; tx++)
            {
                int x = tx * tileSize;
                int y = ty * tileSize;
                int w = Math.Min(tileSize, width - x);
                int h = Math.Min(tileSize, height - y);
                var roi = new System.Drawing.Rectangle(x, y, w, h);
                // Copy the tile out, equalize it, and paste it into the result.
                inputImage.ROI = roi;
                var tile = inputImage.Copy();
                inputImage.ROI = System.Drawing.Rectangle.Empty;
                var equalizedTile = new Image<Gray, byte>(tile.Size);
                CvInvoke.EqualizeHist(tile, equalizedTile);
                result.ROI = roi;
                equalizedTile.CopyTo(result);
                result.ROI = System.Drawing.Rectangle.Empty;
                tile.Dispose();
                equalizedTile.Dispose();
            }
        }
        _logger.Debug("ApplyCLAHE");
        return result;
    }
}
@@ -0,0 +1,100 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: GammaProcessor.cs
// 描述: Gamma校正算子,用于调整图像亮度和对比度
// 功能:
// - Gamma非线性校正
// - 增益调整
// - 使用查找表(LUT)加速处理
// - 适用于图像显示和亮度调整
// 算法: Gamma校正公式 output = (input^(1/gamma)) * gain
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Gamma correction operator: output = (input ^ (1/gamma)) * gain,
/// implemented through a precomputed 256-entry lookup table.
/// </summary>
public class GammaProcessor : ImageProcessorBase
{
    // 256-entry LUT rebuilt on every Process call from the current parameters.
    private byte[] _lookupTable;
    private static readonly ILogger _logger = Log.ForContext<GammaProcessor>();

    // Sets the localized display name/description and allocates the LUT.
    public GammaProcessor()
    {
        Name = LocalizationHelper.GetString("GammaProcessor_Name");
        Description = LocalizationHelper.GetString("GammaProcessor_Description");
        _lookupTable = new byte[256];
    }

    /// <summary>Registers the Gamma and Gain parameters.</summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Gamma", new ProcessorParameter(
            "Gamma",
            LocalizationHelper.GetString("GammaProcessor_Gamma"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("GammaProcessor_Gamma_Desc")));
        Parameters.Add("Gain", new ProcessorParameter(
            "Gain",
            LocalizationHelper.GetString("GammaProcessor_Gain"),
            typeof(double),
            1.0,
            0.1,
            3.0,
            LocalizationHelper.GetString("GammaProcessor_Gain_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Rebuilds the LUT from the current Gamma/Gain and applies it to a copy of the input.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image (not modified).</param>
    /// <returns>A new gamma-corrected image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double gamma = GetParameter<double>("Gamma");
        double gain = GetParameter<double>("Gain");
        BuildLookupTable(gamma, gain);
        var output = inputImage.Clone();
        ApplyLookupTable(output);
        _logger.Debug("Process:Gamma = {0}, Gain = {1}", gamma, gain);
        return output;
    }

    /// <summary>
    /// Fills the LUT: normalize the level to [0,1], raise to 1/gamma,
    /// multiply by gain, then scale back to [0,255] with clamping.
    /// </summary>
    private void BuildLookupTable(double gamma, double gain)
    {
        double invGamma = 1.0 / gamma;
        for (int level = 0; level < 256; level++)
        {
            double corrected = Math.Pow(level / 255.0, invGamma) * gain;
            int scaled = (int)(corrected * 255.0);
            _lookupTable[level] = (byte)Math.Max(0, Math.Min(255, scaled));
        }
        _logger.Debug("Gamma and gain values recorded: gamma = {Gamma}, gain = {Gain}", gamma, gain);
    }

    /// <summary>Replaces every pixel in place with its LUT-mapped value.</summary>
    private void ApplyLookupTable(Image<Gray, byte> image)
    {
        var pixels = image.Data;
        int rows = image.Height;
        int cols = image.Width;
        for (int y = 0; y < rows; y++)
        {
            for (int x = 0; x < cols; x++)
            {
                pixels[y, x, 0] = _lookupTable[pixels[y, x, 0]];
            }
        }
    }
}
@@ -0,0 +1,549 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: HDREnhancementProcessor.cs
// 描述: 高动态范围(HDR)图像增强算子
// 功能:
// - 局部色调映射(Local Tone Mapping
// - 自适应对数映射(Adaptive Logarithmic Mapping
// - Drago色调映射
// - 双边滤波色调映射
// - 增强图像暗部和亮部细节
// 算法: 基于色调映射的HDR增强
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// 高动态范围图像增强算子
/// </summary>
public class HDREnhancementProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<HDREnhancementProcessor>();
// Sets the localized display name and description shown by the processor UI.
public HDREnhancementProcessor()
{
    Name = LocalizationHelper.GetString("HDREnhancementProcessor_Name");
    Description = LocalizationHelper.GetString("HDREnhancementProcessor_Description");
}
/// <summary>
/// Registers the tone-mapping parameters: method selection plus the knobs the
/// individual mapping methods read (gamma, saturation, detail boost, bilateral
/// sigmas, and the Drago/log bias).
/// </summary>
protected override void InitializeParameters()
{
    // Tone-mapping algorithm; default is local tone mapping.
    Parameters.Add("Method", new ProcessorParameter(
        "Method",
        LocalizationHelper.GetString("HDREnhancementProcessor_Method"),
        typeof(string),
        "LocalToneMap",
        null,
        null,
        LocalizationHelper.GetString("HDREnhancementProcessor_Method_Desc"),
        new string[] { "LocalToneMap", "AdaptiveLog", "Drago", "BilateralToneMap" }));
    // Gamma applied by the mapping methods [0.1, 5.0]; default 1.0.
    Parameters.Add("Gamma", new ProcessorParameter(
        "Gamma",
        LocalizationHelper.GetString("HDREnhancementProcessor_Gamma"),
        typeof(double),
        1.0,
        0.1,
        5.0,
        LocalizationHelper.GetString("HDREnhancementProcessor_Gamma_Desc")));
    // Saturation factor (used by the local tone-mapping path).
    Parameters.Add("Saturation", new ProcessorParameter(
        "Saturation",
        LocalizationHelper.GetString("HDREnhancementProcessor_Saturation"),
        typeof(double),
        1.0,
        0.0,
        3.0,
        LocalizationHelper.GetString("HDREnhancementProcessor_Saturation_Desc")));
    // Gain applied to the detail (high-frequency) layer.
    Parameters.Add("DetailBoost", new ProcessorParameter(
        "DetailBoost",
        LocalizationHelper.GetString("HDREnhancementProcessor_DetailBoost"),
        typeof(double),
        1.5,
        0.0,
        5.0,
        LocalizationHelper.GetString("HDREnhancementProcessor_DetailBoost_Desc")));
    // Spatial sigma of the base-layer filter (local/bilateral methods).
    Parameters.Add("SigmaSpace", new ProcessorParameter(
        "SigmaSpace",
        LocalizationHelper.GetString("HDREnhancementProcessor_SigmaSpace"),
        typeof(double),
        20.0,
        1.0,
        100.0,
        LocalizationHelper.GetString("HDREnhancementProcessor_SigmaSpace_Desc")));
    // Range sigma of the bilateral filter (bilateral method only).
    Parameters.Add("SigmaColor", new ProcessorParameter(
        "SigmaColor",
        LocalizationHelper.GetString("HDREnhancementProcessor_SigmaColor"),
        typeof(double),
        30.0,
        1.0,
        100.0,
        LocalizationHelper.GetString("HDREnhancementProcessor_SigmaColor_Desc")));
    // Bias for the adaptive-log / Drago mappings [0, 1]; default 0.85.
    Parameters.Add("Bias", new ProcessorParameter(
        "Bias",
        LocalizationHelper.GetString("HDREnhancementProcessor_Bias"),
        typeof(double),
        0.85,
        0.0,
        1.0,
        LocalizationHelper.GetString("HDREnhancementProcessor_Bias_Desc")));
    _logger.Debug("InitializeParameters");
}
/// <summary>
/// Runs the HDR enhancement on an 8-bit grayscale image using the currently
/// configured parameters; the "Method" parameter selects the strategy.
/// </summary>
/// <param name="inputImage">8-bit grayscale source image.</param>
/// <returns>Tone-mapped 8-bit grayscale image.</returns>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
    string method = GetParameter<string>("Method");
    double gamma = GetParameter<double>("Gamma");
    double saturation = GetParameter<double>("Saturation");
    double detailBoost = GetParameter<double>("DetailBoost");
    double sigmaSpace = GetParameter<double>("SigmaSpace");
    double sigmaColor = GetParameter<double>("SigmaColor");
    double bias = GetParameter<double>("Bias");

    // Dispatch on the selected strategy; anything unrecognized falls back to
    // local tone mapping, matching the original default branch.
    Image<Gray, byte> result = method switch
    {
        "AdaptiveLog" => AdaptiveLogarithmicMapping(inputImage, gamma, bias),
        "Drago" => DragoToneMapping(inputImage, gamma, bias),
        "BilateralToneMap" => BilateralToneMapping(inputImage, gamma, sigmaSpace, sigmaColor, detailBoost),
        _ => LocalToneMapping(inputImage, gamma, sigmaSpace, detailBoost, saturation),
    };

    _logger.Debug("Process: Method={Method}, Gamma={Gamma}, Saturation={Saturation}, DetailBoost={DetailBoost}, SigmaSpace={SigmaSpace}, SigmaColor={SigmaColor}, Bias={Bias}",
        method, gamma, saturation, detailBoost, sigmaSpace, sigmaColor, bias);
    return result;
}
/// <summary>
/// Local tone mapping. Decomposes the log-domain image into a base
/// (illumination) layer and a detail layer, compresses the base layer's
/// dynamic range, amplifies the detail layer, and recombines:
///   Base   = GaussianBlur(log(I))
///   Detail = log(I) - Base
///   Output = exp(Base_compressed + Detail * boost)
/// </summary>
/// <param name="inputImage">8-bit grayscale source image.</param>
/// <param name="gamma">Gamma exponent applied after reconstruction (1.0 = none).</param>
/// <param name="sigmaSpace">Gaussian sigma used to extract the base layer.</param>
/// <param name="detailBoost">Multiplier applied to the detail layer.</param>
/// <param name="saturation">Contrast factor applied around the mean (1.0 = none).</param>
/// <returns>Tone-mapped 8-bit grayscale image.</returns>
private Image<Gray, byte> LocalToneMapping(Image<Gray, byte> inputImage,
    double gamma, double sigmaSpace, double detailBoost, double saturation)
{
    int width = inputImage.Width;
    int height = inputImage.Height;
    // Convert to float and normalize into (0, 1]; the epsilon keeps log() finite.
    var floatImage = inputImage.Convert<Gray, float>();
    for (int y = 0; y < height; y++)
        for (int x = 0; x < width; x++)
            floatImage.Data[y, x, 0] = floatImage.Data[y, x, 0] / 255.0f + 0.001f;
    // Log domain.
    var logImage = new Image<Gray, float>(width, height);
    for (int y = 0; y < height; y++)
        for (int x = 0; x < width; x++)
            logImage.Data[y, x, 0] = (float)Math.Log(floatImage.Data[y, x, 0]);
    // Base layer: large-scale Gaussian blur approximates the illumination component.
    int kernelSize = (int)(sigmaSpace * 6) | 1; // force odd kernel size
    if (kernelSize < 3) kernelSize = 3;
    var baseLayer = new Image<Gray, float>(width, height);
    CvInvoke.GaussianBlur(logImage, baseLayer, new System.Drawing.Size(kernelSize, kernelSize), sigmaSpace);
    // Detail layer.
    var detailLayer = logImage - baseLayer;
    // Measure the base layer's dynamic range so it can be compressed.
    double baseMin = double.MaxValue, baseMax = double.MinValue;
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            float v = baseLayer.Data[y, x, 0];
            if (v < baseMin) baseMin = v;
            if (v > baseMax) baseMax = v;
        }
    }
    double baseRange = baseMax - baseMin;
    if (baseRange < 0.001) baseRange = 0.001;
    // Target dynamic range in the log domain.
    // (FIX: removed an unused "compressionFactor" local that was computed
    // here but never read.)
    double targetRange = Math.Log(256.0);
    var compressedBase = new Image<Gray, float>(width, height);
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            float normalized = (float)((baseLayer.Data[y, x, 0] - baseMin) / baseRange);
            compressedBase.Data[y, x, 0] = (float)(normalized * targetRange + Math.Log(0.01));
        }
    }
    // Recombine: compressed base layer + boosted detail layer.
    var combined = new Image<Gray, float>(width, height);
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            float val = compressedBase.Data[y, x, 0] + detailLayer.Data[y, x, 0] * (float)detailBoost;
            combined.Data[y, x, 0] = val;
        }
    }
    // Exponentiate back to the linear domain.
    var linearResult = new Image<Gray, float>(width, height);
    for (int y = 0; y < height; y++)
        for (int x = 0; x < width; x++)
            linearResult.Data[y, x, 0] = (float)Math.Exp(combined.Data[y, x, 0]);
    // Optional gamma correction, performed relative to the current maximum.
    if (Math.Abs(gamma - 1.0) > 0.01)
    {
        double invGamma = 1.0 / gamma;
        double maxVal = 0;
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                if (linearResult.Data[y, x, 0] > maxVal) maxVal = linearResult.Data[y, x, 0];
        if (maxVal > 0)
        {
            for (int y = 0; y < height; y++)
                for (int x = 0; x < width; x++)
                {
                    double normalized = linearResult.Data[y, x, 0] / maxVal;
                    linearResult.Data[y, x, 0] = (float)(Math.Pow(normalized, invGamma) * maxVal);
                }
        }
    }
    // "Saturation": push pixel values away from (or toward) the global mean.
    if (Math.Abs(saturation - 1.0) > 0.01)
    {
        double mean = 0;
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                mean += linearResult.Data[y, x, 0];
        mean /= (width * height);
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
            {
                double diff = linearResult.Data[y, x, 0] - mean;
                linearResult.Data[y, x, 0] = (float)(mean + diff * saturation);
            }
    }
    // Rescale to [0, 255] and release all intermediates.
    var result = NormalizeToByteImage(linearResult);
    floatImage.Dispose();
    logImage.Dispose();
    baseLayer.Dispose();
    detailLayer.Dispose();
    compressedBase.Dispose();
    combined.Dispose();
    linearResult.Dispose();
    return result;
}
/// <summary>
/// Adaptive logarithmic mapping. The log curve's base adapts to the scene's
/// log-average luminance relative to its maximum:
///   L_out = log(1 + L_in) / log(2 + 8 * (L_avg / L_max)^(ln(bias)/ln(0.5)))
/// followed by optional gamma correction and normalization to [0, 255].
/// </summary>
/// <param name="inputImage">8-bit grayscale source image.</param>
/// <param name="gamma">Gamma exponent (1.0 = none).</param>
/// <param name="bias">Shadow/highlight balance of the mapping curve.</param>
private Image<Gray, byte> AdaptiveLogarithmicMapping(Image<Gray, byte> inputImage,
    double gamma, double bias)
{
    int w = inputImage.Width;
    int h = inputImage.Height;

    // Normalized float luminance in [0, 1].
    var lum = inputImage.Convert<Gray, float>();
    for (int row = 0; row < h; row++)
        for (int col = 0; col < w; col++)
            lum.Data[row, col, 0] /= 255.0f;

    // Single pass collects both the peak luminance and the log-average
    // (the average only counts pixels brighter than a small epsilon).
    float maxLum = 0;
    double logSum = 0;
    int logCount = 0;
    for (int row = 0; row < h; row++)
    {
        for (int col = 0; col < w; col++)
        {
            float v = lum.Data[row, col, 0];
            if (v > maxLum) maxLum = v;
            if (v > 0.001f)
            {
                logSum += Math.Log(v);
                logCount++;
            }
        }
    }
    if (maxLum < 0.001f) maxLum = 0.001f;
    double logAverage = Math.Exp(logSum / Math.Max(logCount, 1));

    // bias steers the balance between dark and bright regions.
    double logBase = Math.Log(2.0 + 8.0 * Math.Pow(logAverage / maxLum, Math.Log(bias) / Math.Log(0.5)));

    var mapped = new Image<Gray, float>(w, h);
    for (int row = 0; row < h; row++)
        for (int col = 0; col < w; col++)
            mapped.Data[row, col, 0] = (float)(Math.Log(1.0 + lum.Data[row, col, 0]) / logBase);

    // Optional gamma correction (skipped when gamma is effectively 1).
    if (Math.Abs(gamma - 1.0) > 0.01)
    {
        double exponent = 1.0 / gamma;
        for (int row = 0; row < h; row++)
            for (int col = 0; col < w; col++)
                mapped.Data[row, col, 0] = (float)Math.Pow(Math.Max(0, mapped.Data[row, col, 0]), exponent);
    }

    var output = NormalizeToByteImage(mapped);
    lum.Dispose();
    mapped.Dispose();
    return output;
}
/// <summary>
/// Drago tone mapping with a per-pixel adaptive logarithm base:
///   base  = 2 + 8 * (L / L_max)^(ln(bias) / ln(0.5))
///   L_out = (log(1 + L) / log(base)) / log10(1 + L_max)
/// followed by optional gamma correction and normalization to [0, 255].
/// </summary>
/// <param name="inputImage">8-bit grayscale source image.</param>
/// <param name="gamma">Gamma exponent (1.0 = none).</param>
/// <param name="bias">Shadow/highlight balance of the mapping curve.</param>
private Image<Gray, byte> DragoToneMapping(Image<Gray, byte> inputImage,
    double gamma, double bias)
{
    int w = inputImage.Width;
    int h = inputImage.Height;

    // Normalize into [0, 1] and track the peak luminance in the same pass.
    var lum = inputImage.Convert<Gray, float>();
    float peak = 0;
    for (int row = 0; row < h; row++)
    {
        for (int col = 0; col < w; col++)
        {
            float v = lum.Data[row, col, 0] / 255.0f;
            lum.Data[row, col, 0] = v;
            if (v > peak) peak = v;
        }
    }
    if (peak < 0.001f) peak = 0.001f;

    double biasExponent = Math.Log(bias) / Math.Log(0.5);
    double denominator = Math.Log10(1.0 + peak);
    if (denominator < 0.001) denominator = 0.001;

    var mapped = new Image<Gray, float>(w, h);
    for (int row = 0; row < h; row++)
    {
        for (int col = 0; col < w; col++)
        {
            float v = lum.Data[row, col, 0];
            // Brighter pixels get a larger log base, compressing highlights
            // more strongly than shadows.
            double adaptiveBase = 2.0 + 8.0 * Math.Pow(v / peak, biasExponent);
            double value = Math.Log(1.0 + v) / Math.Log(adaptiveBase) / denominator;
            mapped.Data[row, col, 0] = (float)Math.Max(0, Math.Min(1.0, value));
        }
    }

    // Optional gamma correction.
    if (Math.Abs(gamma - 1.0) > 0.01)
    {
        double exponent = 1.0 / gamma;
        for (int row = 0; row < h; row++)
            for (int col = 0; col < w; col++)
                mapped.Data[row, col, 0] = (float)Math.Pow(mapped.Data[row, col, 0], exponent);
    }

    var output = NormalizeToByteImage(mapped);
    lum.Dispose();
    mapped.Dispose();
    return output;
}
/// <summary>
/// Bilateral-filter tone mapping. Uses a bilateral filter to separate a base
/// layer from a detail layer in the log domain; the filter's edge-preserving
/// behavior keeps the detail layer free of halo artifacts around strong edges.
/// </summary>
/// <param name="inputImage">8-bit grayscale source image.</param>
/// <param name="gamma">Gamma exponent applied after reconstruction (1.0 = none).</param>
/// <param name="sigmaSpace">Spatial sigma of the bilateral filter.</param>
/// <param name="sigmaColor">Range (intensity) sigma of the bilateral filter.</param>
/// <param name="detailBoost">Multiplier applied to the detail layer.</param>
/// <returns>Tone-mapped 8-bit grayscale image.</returns>
private Image<Gray, byte> BilateralToneMapping(Image<Gray, byte> inputImage,
double gamma, double sigmaSpace, double sigmaColor, double detailBoost)
{
int width = inputImage.Width;
int height = inputImage.Height;
// Convert to float and move to the log domain; epsilon keeps log() finite.
var floatImage = inputImage.Convert<Gray, float>();
var logImage = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
logImage.Data[y, x, 0] = (float)Math.Log(floatImage.Data[y, x, 0] / 255.0f + 0.001);
// Edge-preserving bilateral filter extracts the base (illumination) layer.
// Filter diameter derived from sigmaSpace, forced odd and capped at 31.
int diameter = (int)(sigmaSpace * 2) | 1;
if (diameter < 3) diameter = 3;
if (diameter > 31) diameter = 31;
var baseLayer = new Image<Gray, float>(width, height);
// BilateralFilter is run on bytes here: normalize the log image to bytes,
// filter, then map the result back to float below.
var logNorm = NormalizeToByteImage(logImage);
var baseNorm = new Image<Gray, byte>(width, height);
CvInvoke.BilateralFilter(logNorm, baseNorm, diameter, sigmaColor, sigmaSpace);
// Map the filtered byte image back into the original log-domain range
// (NormalizeToByteImage used logImage's min/max, so the same range applies).
double logMin = double.MaxValue, logMax = double.MinValue;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
{
float v = logImage.Data[y, x, 0];
if (v < logMin) logMin = v;
if (v > logMax) logMax = v;
}
double logRange = logMax - logMin;
if (logRange < 0.001) logRange = 0.001;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
baseLayer.Data[y, x, 0] = (float)(baseNorm.Data[y, x, 0] / 255.0 * logRange + logMin);
// Detail layer = log image - base layer.
var detailLayer = logImage - baseLayer;
// Compress the base layer's dynamic range toward a fixed log-domain target.
double baseMin = double.MaxValue, baseMax = double.MinValue;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
{
float v = baseLayer.Data[y, x, 0];
if (v < baseMin) baseMin = v;
if (v > baseMax) baseMax = v;
}
double bRange = baseMax - baseMin;
if (bRange < 0.001) bRange = 0.001;
double targetRange = Math.Log(256.0);
double compression = targetRange / bRange;
// Recombine: compressed base + boosted detail.
var combined = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
{
float compBase = (float)((baseLayer.Data[y, x, 0] - baseMin) * compression + Math.Log(0.01));
combined.Data[y, x, 0] = compBase + detailLayer.Data[y, x, 0] * (float)detailBoost;
}
// Exponentiate back to the linear domain.
var linearResult = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
linearResult.Data[y, x, 0] = (float)Math.Exp(combined.Data[y, x, 0]);
// Optional gamma correction, relative to the current maximum.
if (Math.Abs(gamma - 1.0) > 0.01)
{
double invGamma = 1.0 / gamma;
double maxVal = 0;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
if (linearResult.Data[y, x, 0] > maxVal) maxVal = linearResult.Data[y, x, 0];
if (maxVal > 0)
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
linearResult.Data[y, x, 0] = (float)(Math.Pow(linearResult.Data[y, x, 0] / maxVal, invGamma) * maxVal);
}
// Rescale to [0, 255] and release all intermediates.
var result = NormalizeToByteImage(linearResult);
floatImage.Dispose();
logImage.Dispose();
logNorm.Dispose();
baseNorm.Dispose();
baseLayer.Dispose();
detailLayer.Dispose();
combined.Dispose();
linearResult.Dispose();
return result;
}
/// <summary>
/// Linearly rescales a float image so its min..max span maps onto [0, 255].
/// </summary>
/// <remarks>
/// FIX: a constant (zero-range) image previously came out all black because
/// the zero-range path left the result at its default zeros; such an image
/// now maps to its clamped constant value instead.
/// </remarks>
/// <param name="floatImage">Source single-channel float image.</param>
/// <returns>New 8-bit grayscale image of the same size.</returns>
private Image<Gray, byte> NormalizeToByteImage(Image<Gray, float> floatImage)
{
    double minVal = double.MaxValue;
    double maxVal = double.MinValue;
    for (int y = 0; y < floatImage.Height; y++)
        for (int x = 0; x < floatImage.Width; x++)
        {
            float val = floatImage.Data[y, x, 0];
            if (val < minVal) minVal = val;
            if (val > maxVal) maxVal = val;
        }
    var result = new Image<Gray, byte>(floatImage.Size);
    double range = maxVal - minVal;
    if (range > 0)
    {
        for (int y = 0; y < floatImage.Height; y++)
            for (int x = 0; x < floatImage.Width; x++)
            {
                int normalized = (int)((floatImage.Data[y, x, 0] - minVal) / range * 255.0);
                result.Data[y, x, 0] = (byte)Math.Max(0, Math.Min(255, normalized));
            }
    }
    else if (floatImage.Width > 0 && floatImage.Height > 0)
    {
        // Degenerate case: every pixel holds the same value.
        byte constant = (byte)Math.Max(0, Math.Min(255, (int)Math.Round(minVal)));
        for (int y = 0; y < floatImage.Height; y++)
            for (int x = 0; x < floatImage.Width; x++)
                result.Data[y, x, 0] = constant;
    }
    return result;
}
}
@@ -0,0 +1,213 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: HierarchicalEnhancementProcessor.cs
// 描述: 层次增强算子,基于多尺度高斯分解对不同尺度细节独立增强
// 功能:
// - 将图像分解为多层细节层 + 基础层
// - 对每层细节独立控制增益
// - 支持基础层亮度调整和对比度限制
// 算法: 多尺度高斯差分分解与重建
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Hierarchical enhancement processor. Decomposes the image into several
/// detail layers plus a base layer via multi-scale Gaussian differences and
/// amplifies each scale with its own interpolated gain before reconstruction.
/// </summary>
public class HierarchicalEnhancementProcessor : ImageProcessorBase
{
// Class-scoped Serilog logger.
private static readonly ILogger _logger = Log.ForContext<HierarchicalEnhancementProcessor>();
/// <summary>
/// Initializes the processor with its localized display name and description.
/// </summary>
public HierarchicalEnhancementProcessor()
{
Name = LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Name");
Description = LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Description");
}
/// <summary>
/// Registers tunable parameters: number of decomposition levels, per-scale
/// gains (fine/medium/coarse), base-layer gain, and a detail clip limit.
/// </summary>
protected override void InitializeParameters()
{
// Number of Gaussian decomposition levels (= number of detail layers).
Parameters.Add("Levels", new ProcessorParameter(
"Levels",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Levels"),
typeof(int),
4,
2,
8,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Levels_Desc")));
// Gain for the finest detail scale (level 0).
Parameters.Add("FineGain", new ProcessorParameter(
"FineGain",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_FineGain"),
typeof(double),
2.0,
0.0,
10.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_FineGain_Desc")));
// Gain for the middle detail scales.
Parameters.Add("MediumGain", new ProcessorParameter(
"MediumGain",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_MediumGain"),
typeof(double),
1.5,
0.0,
10.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_MediumGain_Desc")));
// Gain for the coarsest detail scale.
Parameters.Add("CoarseGain", new ProcessorParameter(
"CoarseGain",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_CoarseGain"),
typeof(double),
1.0,
0.0,
10.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_CoarseGain_Desc")));
// Brightness multiplier applied to the residual base layer.
Parameters.Add("BaseGain", new ProcessorParameter(
"BaseGain",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_BaseGain"),
typeof(double),
1.0,
0.0,
3.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_BaseGain_Desc")));
// Per-level detail clamp; 0 disables clipping.
Parameters.Add("ClipLimit", new ProcessorParameter(
"ClipLimit",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_ClipLimit"),
typeof(double),
0.0,
0.0,
50.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_ClipLimit_Desc")));
_logger.Debug("InitializeParameters");
}
/// <summary>
/// Runs the multi-scale Gaussian-difference decomposition and reconstruction
/// on an 8-bit grayscale image and returns the enhanced image.
/// </summary>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
int levels = GetParameter<int>("Levels");
double fineGain = GetParameter<double>("FineGain");
double mediumGain = GetParameter<double>("MediumGain");
double coarseGain = GetParameter<double>("CoarseGain");
double baseGain = GetParameter<double>("BaseGain");
double clipLimit = GetParameter<double>("ClipLimit");
_logger.Debug("Process: Levels={Levels}, Fine={Fine}, Medium={Medium}, Coarse={Coarse}, Base={Base}, Clip={Clip}",
levels, fineGain, mediumGain, coarseGain, baseGain, clipLimit);
int h = inputImage.Height;
int w = inputImage.Width;
// === Multi-scale Gaussian-difference decomposition (everything runs at the
// original resolution; no pyramid up/down-sampling is needed) ===
// Gaussian blurs with increasing sigma form a smoothing sequence:
//   G0 (original), G1, G2, ..., G_n (base layer)
// Detail layers: D_i = G_i - G_{i+1}
// Reconstruction: output = sum(D_i * gain_i) + G_n * baseGain
// Per-level Gaussian sigma, exponentially increasing.
var sigmas = new double[levels];
for (int i = 0; i < levels; i++)
sigmas[i] = Math.Pow(2, i + 1); // 2, 4, 8, 16, ...
// Build the smoothing sequence as raw float arrays (sidesteps issues the
// author noted with Emgu float Images).
var smoothLayers = new float[levels + 1][]; // [0]=original, [1..n]=Gaussian-blurred
smoothLayers[0] = new float[h * w];
var srcData = inputImage.Data;
Parallel.For(0, h, y =>
{
int row = y * w;
for (int x = 0; x < w; x++)
smoothLayers[0][row + x] = srcData[y, x, 0];
});
for (int i = 0; i < levels; i++)
{
int ksize = ((int)(sigmas[i] * 3)) | 1; // force odd kernel size
if (ksize < 3) ksize = 3;
using var src = new Image<Gray, byte>(w, h);
// Round the previous float layer back to bytes for the Gaussian blur
// (NOTE: this quantizes each level to 8 bits before blurring).
var prevLayer = smoothLayers[i];
var sd = src.Data;
Parallel.For(0, h, y =>
{
int row = y * w;
for (int x = 0; x < w; x++)
sd[y, x, 0] = (byte)Math.Clamp((int)Math.Round(prevLayer[row + x]), 0, 255);
});
using var dst = new Image<Gray, byte>(w, h);
CvInvoke.GaussianBlur(src, dst, new System.Drawing.Size(ksize, ksize), sigmas[i]);
smoothLayers[i + 1] = new float[h * w];
var dd = dst.Data;
var nextLayer = smoothLayers[i + 1];
Parallel.For(0, h, y =>
{
int row = y * w;
for (int x = 0; x < w; x++)
nextLayer[row + x] = dd[y, x, 0];
});
}
// === Interpolate per-level gains, then reconstruct directly ===
// Gains blend linearly from fineGain (level 0) through mediumGain
// (middle level) to coarseGain (last level).
var gains = new double[levels];
for (int i = 0; i < levels; i++)
{
double t = levels <= 1 ? 0.0 : (double)i / (levels - 1);
if (t <= 0.5)
{
double t2 = t * 2.0;
gains[i] = fineGain * (1.0 - t2) + mediumGain * t2;
}
else
{
double t2 = (t - 0.5) * 2.0;
gains[i] = mediumGain * (1.0 - t2) + coarseGain * t2;
}
}
// Reconstruction: output = baseGain * G_n + sum(gain_i * (G_i - G_{i+1}))
float fBaseGain = (float)baseGain;
float fClip = (float)clipLimit;
var baseLayerData = smoothLayers[levels];
var result = new Image<Gray, byte>(w, h);
var resultData = result.Data;
// Pre-convert gains to float for the per-pixel loop.
var fGains = new float[levels];
for (int i = 0; i < levels; i++)
fGains[i] = (float)gains[i];
Parallel.For(0, h, y =>
{
int row = y * w;
for (int x = 0; x < w; x++)
{
int idx = row + x;
float val = baseLayerData[idx] * fBaseGain;
for (int i = 0; i < levels; i++)
{
float detail = smoothLayers[i][idx] - smoothLayers[i + 1][idx];
detail *= fGains[i];
if (fClip > 0)
detail = Math.Clamp(detail, -fClip, fClip);
val += detail;
}
resultData[y, x, 0] = (byte)Math.Clamp((int)Math.Round(val), 0, 255);
}
});
_logger.Debug("Process completed: {Levels} levels, output={W}x{H}", levels, w, h);
return result;
}
}
@@ -0,0 +1,142 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: HistogramEqualizationProcessor.cs
// 描述: 直方图均衡化算子,用于增强图像对比度
// 功能:
// - 全局直方图均衡化
// - 自适应直方图均衡化(CLAHE)
// - 限制对比度增强
// - 改善图像的整体对比度
// 算法: 直方图均衡化、CLAHE
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Histogram equalization processor: global equalization or CLAHE
/// (Contrast Limited Adaptive Histogram Equalization).
/// </summary>
public class HistogramEqualizationProcessor : ImageProcessorBase
{
    // Class-scoped Serilog logger.
    private static readonly ILogger _logger = Log.ForContext<HistogramEqualizationProcessor>();

    /// <summary>
    /// Initializes the processor with its localized display name and description.
    /// </summary>
    public HistogramEqualizationProcessor()
    {
        Name = LocalizationHelper.GetString("HistogramEqualizationProcessor_Name");
        Description = LocalizationHelper.GetString("HistogramEqualizationProcessor_Description");
    }

    /// <summary>
    /// Registers tunable parameters: method ("Global" / "CLAHE"), CLAHE contrast
    /// clip limit, and CLAHE tile size (edge length in pixels).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("HistogramEqualizationProcessor_Method"),
            typeof(string),
            "Global",
            null,
            null,
            LocalizationHelper.GetString("HistogramEqualizationProcessor_Method_Desc"),
            new string[] { "Global", "CLAHE" }));
        Parameters.Add("ClipLimit", new ProcessorParameter(
            "ClipLimit",
            LocalizationHelper.GetString("HistogramEqualizationProcessor_ClipLimit"),
            typeof(double),
            2.0,
            1.0,
            10.0,
            LocalizationHelper.GetString("HistogramEqualizationProcessor_ClipLimit_Desc")));
        Parameters.Add("TileSize", new ProcessorParameter(
            "TileSize",
            LocalizationHelper.GetString("HistogramEqualizationProcessor_TileSize"),
            typeof(int),
            8,
            4,
            32,
            LocalizationHelper.GetString("HistogramEqualizationProcessor_TileSize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the selected equalization to an 8-bit grayscale image and
    /// returns a new image; the input is not modified.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double clipLimit = GetParameter<double>("ClipLimit");
        int tileSize = GetParameter<int>("TileSize");
        Image<Gray, byte> result;
        if (method == "CLAHE")
        {
            result = ApplyCLAHE(inputImage, clipLimit, tileSize);
        }
        else // Global
        {
            result = new Image<Gray, byte>(inputImage.Size);
            CvInvoke.EqualizeHist(inputImage, result);
        }
        _logger.Debug("Process: Method = {Method}, ClipLimit = {ClipLimit}, TileSize = {TileSize}",
            method, clipLimit, tileSize);
        return result;
    }

    /// <summary>
    /// Contrast Limited Adaptive Histogram Equalization.
    /// </summary>
    /// <remarks>
    /// FIX: the previous implementation equalized each tile independently with
    /// EqualizeHist and blended by clipLimit/10, which produced visible seams at
    /// tile boundaries and only approximated contrast limiting. OpenCV's built-in
    /// CLAHE performs true clip-limited equalization with bilinear interpolation
    /// across tile borders, so it is used directly. <paramref name="tileSize"/>
    /// keeps its original meaning (tile edge length in pixels) and is converted
    /// to the tile-grid dimensions OpenCV expects.
    /// </remarks>
    private Image<Gray, byte> ApplyCLAHE(Image<Gray, byte> inputImage, double clipLimit, int tileSize)
    {
        // Number of tiles along each axis, derived from the tile pixel size.
        int tilesX = Math.Max(1, (inputImage.Width + tileSize - 1) / tileSize);
        int tilesY = Math.Max(1, (inputImage.Height + tileSize - 1) / tileSize);
        var result = new Image<Gray, byte>(inputImage.Size);
        CvInvoke.CLAHE(inputImage, clipLimit, new System.Drawing.Size(tilesX, tilesY), result);
        return result;
    }
}
@@ -0,0 +1,266 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: HistogramOverlayProcessor.cs
// 描述: 直方图叠加算子,计算灰度直方图并以蓝色柱状图绘制到结果图像左上角
// 功能:
// - 计算输入图像的灰度直方图
// - 将直方图绘制为蓝色半透明柱状图叠加到图像左上角
// - 输出直方图统计表格数据
// 算法: 灰度直方图统计 + 彩色图像叠加
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
using System.Text;
namespace ImageProcessing.Processors;
/// <summary>
/// Histogram overlay processor. Computes the grayscale histogram, draws it as a
/// blue bar chart (with axes and tick labels) in the top-left corner of a color
/// copy of the image, and publishes the statistics through OutputData. The
/// returned grayscale image is an unmodified clone of the input.
/// </summary>
public class HistogramOverlayProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<HistogramOverlayProcessor>();
// Fixed layout constants — this processor exposes no tunable parameters.
private const int ChartWidth = 256; // plot-area width in pixels
private const int ChartHeight = 200; // plot-area height in pixels
private const int AxisMarginLeft = 50; // width reserved for Y-axis labels
private const int AxisMarginBottom = 25; // height reserved for X-axis labels
private const int Padding = 8; // extra inner padding of the background panel
private const int PaddingRight = 25; // extra right padding (room for the last X tick label)
private const int Margin = 10; // margin from the image's top-left corner
private const float BgAlpha = 0.6f; // background darkening / bar blending strength
private const double FontScale = 0.35; // label font scale
private const int FontThickness = 1; // label stroke thickness
/// <summary>
/// Initializes the processor with its localized display name and description.
/// </summary>
public HistogramOverlayProcessor()
{
Name = LocalizationHelper.GetString("HistogramOverlayProcessor_Name");
Description = LocalizationHelper.GetString("HistogramOverlayProcessor_Description");
}
protected override void InitializeParameters()
{
// No adjustable parameters.
}
/// <summary>
/// Computes the histogram and statistics of <paramref name="inputImage"/>,
/// stores them in OutputData ("HistogramTable" = text report, "Histogram" =
/// raw bin counts, "PseudoColorImage" = BGR image with the chart overlay),
/// and returns an unmodified clone of the input.
/// </summary>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
int h = inputImage.Height;
int w = inputImage.Width;
var srcData = inputImage.Data;
// === 1. Compute the grayscale histogram ===
var hist = new int[256];
for (int y = 0; y < h; y++)
for (int x = 0; x < w; x++)
hist[srcData[y, x, 0]]++;
int maxCount = 0;
long totalPixels = (long)h * w;
for (int i = 0; i < 256; i++)
if (hist[i] > maxCount) maxCount = hist[i];
// === 2. Compute summary statistics (min/max/mean/median/mode/std-dev) ===
double mean = 0, variance = 0;
int minVal = 255, maxVal = 0;
int modeVal = 0, modeCount = 0;
long medianTarget = totalPixels / 2, cumulative = 0;
int medianVal = 0;
bool medianFound = false;
for (int i = 0; i < 256; i++)
{
if (hist[i] > 0)
{
if (i < minVal) minVal = i;
if (i > maxVal) maxVal = i;
}
if (hist[i] > modeCount) { modeCount = hist[i]; modeVal = i; }
mean += (double)i * hist[i];
cumulative += hist[i];
if (!medianFound && cumulative >= medianTarget) { medianVal = i; medianFound = true; }
}
mean /= totalPixels;
for (int i = 0; i < 256; i++)
variance += hist[i] * (i - mean) * (i - mean);
variance /= totalPixels;
double stdDev = Math.Sqrt(variance);
// === 3. Emit the statistics table (report text is intentionally localized) ===
var sb = new StringBuilder();
sb.AppendLine("=== 灰度直方图统计 ===");
sb.AppendLine($"图像尺寸: {w} x {h}");
sb.AppendLine($"总像素数: {totalPixels}");
sb.AppendLine($"最小灰度: {minVal}");
sb.AppendLine($"最大灰度: {maxVal}");
sb.AppendLine($"平均灰度: {mean:F2}");
sb.AppendLine($"中位灰度: {medianVal}");
sb.AppendLine($"众数灰度: {modeVal} (出现 {modeCount} 次)");
sb.AppendLine($"标准差: {stdDev:F2}");
sb.AppendLine();
sb.AppendLine("灰度值\t像素数\t占比(%)");
for (int i = 0; i < 256; i++)
{
if (hist[i] > 0)
sb.AppendLine($"{i}\t{hist[i]}\t{(double)hist[i] / totalPixels * 100.0:F4}");
}
OutputData["HistogramTable"] = sb.ToString();
OutputData["Histogram"] = hist;
// === 4. Build the color overlay image (blue bar chart + XY axes) ===
var colorImage = inputImage.Convert<Bgr, byte>();
var colorData = colorImage.Data;
// Layout: background panel = Padding + Y-axis labels + plot area + right padding
// (horizontal) and Padding + plot area + X-axis labels + Padding (vertical).
int totalW = Padding + AxisMarginLeft + ChartWidth + PaddingRight;
int totalH = Padding + ChartHeight + AxisMarginBottom + Padding;
int bgW = Math.Min(totalW, w - Margin);
int bgH = Math.Min(totalH, h - Margin);
if (bgW > Padding + AxisMarginLeft && bgH > Padding + AxisMarginBottom)
{
int plotW = Math.Min(ChartWidth, bgW - Padding - AxisMarginLeft - PaddingRight);
int plotH = Math.Min(ChartHeight, bgH - Padding - AxisMarginBottom - Padding);
// Image too small for the chart — skip the overlay entirely.
if (plotW <= 0 || plotH <= 0) goto SkipOverlay;
// Top-left corner of the plot area in image coordinates.
int plotX0 = Margin + Padding + AxisMarginLeft;
int plotY0 = Margin + Padding;
// Bar height for each plot column (columns map onto histogram bins).
double binWidth = (double)plotW / 256.0;
var barHeights = new int[plotW];
for (int px = 0; px < plotW; px++)
{
int bin = Math.Min((int)(px / binWidth), 255);
barHeights[px] = maxCount > 0 ? (int)((long)hist[bin] * (plotH - 1) / maxCount) : 0;
}
float alpha = BgAlpha;
float inv = 1.0f - alpha;
// Darken the whole background panel (covers axes, labels and padding).
Parallel.For(0, bgH, dy =>
{
int imgY = Margin + dy;
if (imgY >= h) return;
for (int dx = 0; dx < bgW; dx++)
{
int imgX = Margin + dx;
if (imgX >= w) break;
colorData[imgY, imgX, 0] = (byte)(int)(colorData[imgY, imgX, 0] * inv);
colorData[imgY, imgX, 1] = (byte)(int)(colorData[imgY, imgX, 1] * inv);
colorData[imgY, imgX, 2] = (byte)(int)(colorData[imgY, imgX, 2] * inv);
}
});
// Draw the blue bars (BGR channel order: index 0 = blue).
Parallel.For(0, plotH, dy =>
{
int imgY = plotY0 + dy;
if (imgY >= h) return;
int rowFromBottom = plotH - 1 - dy;
for (int dx = 0; dx < plotW; dx++)
{
int imgX = plotX0 + dx;
if (imgX >= w) break;
if (rowFromBottom < barHeights[dx])
{
byte curB = colorData[imgY, imgX, 0];
byte curG = colorData[imgY, imgX, 1];
byte curR = colorData[imgY, imgX, 2];
colorData[imgY, imgX, 0] = (byte)Math.Clamp(curB + (int)(255 * alpha), 0, 255);
colorData[imgY, imgX, 1] = (byte)Math.Clamp(curG + (int)(50 * alpha), 0, 255);
colorData[imgY, imgX, 2] = (byte)Math.Clamp(curR + (int)(50 * alpha), 0, 255);
}
}
});
// === 5. Draw axis lines and tick labels ===
var white = new MCvScalar(255, 255, 255);
var gray = new MCvScalar(180, 180, 180);
// Y axis.
CvInvoke.Line(colorImage,
new Point(plotX0, plotY0),
new Point(plotX0, plotY0 + plotH),
white, 1);
// X axis.
CvInvoke.Line(colorImage,
new Point(plotX0, plotY0 + plotH),
new Point(plotX0 + plotW, plotY0 + plotH),
white, 1);
// X-axis ticks at gray levels 0, 64, 128, 192, 255.
int[] xTicks = { 0, 64, 128, 192, 255 };
foreach (int tick in xTicks)
{
int tx = plotX0 + (int)(tick * binWidth);
if (tx >= w) break;
CvInvoke.Line(colorImage,
new Point(tx, plotY0 + plotH),
new Point(tx, plotY0 + plotH + 4),
white, 1);
string label = tick.ToString();
CvInvoke.PutText(colorImage, label,
new Point(tx - 8, plotY0 + plotH + 18),
FontFace.HersheySimplex, FontScale, white, FontThickness);
}
// Y-axis ticks at 0, 1/4, 1/2, 3/4 and the full peak bin count
// (labels show pixel counts, not percentages).
for (int i = 0; i <= 4; i++)
{
int val = maxCount * i / 4;
int ty = plotY0 + plotH - (int)((long)plotH * i / 4);
CvInvoke.Line(colorImage,
new Point(plotX0 - 4, ty),
new Point(plotX0, ty),
white, 1);
// Dashed horizontal grid line for the interior ticks.
if (i > 0 && i < 4)
{
for (int gx = plotX0 + 2; gx < plotX0 + plotW; gx += 6)
{
int gxEnd = Math.Min(gx + 2, plotX0 + plotW);
CvInvoke.Line(colorImage,
new Point(gx, ty),
new Point(gxEnd, ty),
gray, 1);
}
}
string label = FormatCount(val);
CvInvoke.PutText(colorImage, label,
new Point(Margin + Padding, ty + 4),
FontFace.HersheySimplex, FontScale, white, FontThickness);
}
}
SkipOverlay:
OutputData["PseudoColorImage"] = colorImage;
_logger.Debug("Process completed: histogram overlay, mean={Mean:F2}, stdDev={Std:F2}", mean, stdDev);
return inputImage.Clone();
}
/// <summary>
/// Formats a pixel count compactly (e.g. 12345 becomes "12.3K").
/// </summary>
private static string FormatCount(int count)
{
if (count >= 1_000_000) return $"{count / 1_000_000.0:F1}M";
if (count >= 1_000) return $"{count / 1_000.0:F1}K";
return count.ToString();
}
}
@@ -0,0 +1,320 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: RetinexProcessor.cs
// 描述: 基于Retinex的多尺度阴影校正算子
// 功能:
// - 单尺度Retinex (SSR)
// - 多尺度Retinex (MSR)
// - 带色彩恢复的多尺度Retinex (MSRCR)
// - 光照不均匀校正
// - 阴影去除
// 算法: Retinex理论 - 将图像分解为反射分量和光照分量
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Retinex多尺度阴影校正算子
/// </summary>
public class RetinexProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<RetinexProcessor>();
/// <summary>
/// Initializes the processor, pulling its display name and description
/// from the localization resources.
/// </summary>
public RetinexProcessor()
{
Name = LocalizationHelper.GetString("RetinexProcessor_Name");
Description = LocalizationHelper.GetString("RetinexProcessor_Description");
}
/// <summary>
/// Registers the tunable parameters of the Retinex processor: variant
/// selection, the three Gaussian scales, and output gain/offset.
/// </summary>
protected override void InitializeParameters()
{
// Retinex variant: single-scale, multi-scale, or multi-scale with color restoration.
Parameters.Add("Method", new ProcessorParameter(
"Method",
LocalizationHelper.GetString("RetinexProcessor_Method"),
typeof(string),
"MSR",
null,
null,
LocalizationHelper.GetString("RetinexProcessor_Method_Desc"),
new string[] { "SSR", "MSR", "MSRCR" }));
// Small Gaussian scale (local contrast), used by MSR/MSRCR.
Parameters.Add("Sigma1", new ProcessorParameter(
"Sigma1",
LocalizationHelper.GetString("RetinexProcessor_Sigma1"),
typeof(double),
15.0,
1.0,
100.0,
LocalizationHelper.GetString("RetinexProcessor_Sigma1_Desc")));
// Medium Gaussian scale; SSR uses this scale alone.
Parameters.Add("Sigma2", new ProcessorParameter(
"Sigma2",
LocalizationHelper.GetString("RetinexProcessor_Sigma2"),
typeof(double),
80.0,
1.0,
200.0,
LocalizationHelper.GetString("RetinexProcessor_Sigma2_Desc")));
// Large Gaussian scale (global illumination), used by MSR/MSRCR.
Parameters.Add("Sigma3", new ProcessorParameter(
"Sigma3",
LocalizationHelper.GetString("RetinexProcessor_Sigma3"),
typeof(double),
250.0,
1.0,
500.0,
LocalizationHelper.GetString("RetinexProcessor_Sigma3_Desc")));
// Multiplier applied to the reflectance before normalization.
Parameters.Add("Gain", new ProcessorParameter(
"Gain",
LocalizationHelper.GetString("RetinexProcessor_Gain"),
typeof(double),
1.0,
0.1,
5.0,
LocalizationHelper.GetString("RetinexProcessor_Gain_Desc")));
// Additive offset applied to the reflectance before normalization.
Parameters.Add("Offset", new ProcessorParameter(
"Offset",
LocalizationHelper.GetString("RetinexProcessor_Offset"),
typeof(int),
0,
-100,
100,
LocalizationHelper.GetString("RetinexProcessor_Offset_Desc")));
_logger.Debug("InitializeParameters");
}
/// <summary>
/// Runs the selected Retinex variant with the current parameters.
/// SSR uses only the middle scale (Sigma2); MSR and MSRCR combine all three.
/// </summary>
/// <param name="inputImage">8-bit grayscale source image.</param>
/// <returns>Shadow-corrected 8-bit grayscale image.</returns>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
    string method = GetParameter<string>("Method");
    double sigma1 = GetParameter<double>("Sigma1");
    double sigma2 = GetParameter<double>("Sigma2");
    double sigma3 = GetParameter<double>("Sigma3");
    double gain = GetParameter<double>("Gain");
    int offset = GetParameter<int>("Offset");

    // Anything other than SSR/MSR falls through to MSRCR, matching the
    // original if/else-if/else chain.
    Image<Gray, byte> result = method switch
    {
        "SSR" => SingleScaleRetinex(inputImage, sigma2, gain, offset),
        "MSR" => MultiScaleRetinex(inputImage, new[] { sigma1, sigma2, sigma3 }, gain, offset),
        _ => MultiScaleRetinexCR(inputImage, new[] { sigma1, sigma2, sigma3 }, gain, offset),
    };

    _logger.Debug("Process: Method = {Method}, Sigma1 = {Sigma1}, Sigma2 = {Sigma2}, Sigma3 = {Sigma3}, Gain = {Gain}, Offset = {Offset}",
        method, sigma1, sigma2, sigma3, gain, offset);
    return result;
}
/// <summary>
/// Single-Scale Retinex (SSR): R(x,y) = log(I(x,y)) - log(I(x,y) * G(x,y)),
/// where G is a Gaussian kernel with the given sigma. The reflectance R is
/// scaled by <paramref name="gain"/>, shifted by <paramref name="offset"/>,
/// then normalized back to [0, 255].
/// </summary>
/// <remarks>
/// FIX: the original reassigned <c>floatImage = floatImage + 1.0f</c> and
/// <c>retinex = retinex * gain + offset</c>, orphaning the intermediate Emgu
/// images (which wrap unmanaged buffers) without disposing them. The +1 shift
/// and the gain/offset are now applied in place so every image created here
/// is explicitly disposed.
/// </remarks>
private Image<Gray, byte> SingleScaleRetinex(Image<Gray, byte> inputImage, double sigma, double gain, int offset)
{
    // Float copy, shifted by +1 so log(0) cannot occur.
    Image<Gray, float> floatImage = inputImage.Convert<Gray, float>();
    for (int y = 0; y < inputImage.Height; y++)
    {
        for (int x = 0; x < inputImage.Width; x++)
        {
            floatImage.Data[y, x, 0] += 1.0f;
        }
    }
    // log(I)
    Image<Gray, float> logImage = new Image<Gray, float>(inputImage.Size);
    for (int y = 0; y < inputImage.Height; y++)
    {
        for (int x = 0; x < inputImage.Width; x++)
        {
            logImage.Data[y, x, 0] = (float)Math.Log(floatImage.Data[y, x, 0]);
        }
    }
    // Gaussian blur estimates the illumination component.
    Image<Gray, float> blurred = new Image<Gray, float>(inputImage.Size);
    int kernelSize = (int)(sigma * 6) | 1; // force odd kernel size
    if (kernelSize < 3) kernelSize = 3;
    CvInvoke.GaussianBlur(floatImage, blurred, new System.Drawing.Size(kernelSize, kernelSize), sigma);
    // log(I * G)
    Image<Gray, float> logBlurred = new Image<Gray, float>(inputImage.Size);
    for (int y = 0; y < inputImage.Height; y++)
    {
        for (int x = 0; x < inputImage.Width; x++)
        {
            logBlurred.Data[y, x, 0] = (float)Math.Log(blurred.Data[y, x, 0]);
        }
    }
    // R = log(I) - log(I*G), then gain and offset applied in place.
    Image<Gray, float> retinex = logImage - logBlurred;
    for (int y = 0; y < inputImage.Height; y++)
    {
        for (int x = 0; x < inputImage.Width; x++)
        {
            retinex.Data[y, x, 0] = (float)(retinex.Data[y, x, 0] * gain + offset);
        }
    }
    // Rescale to [0, 255] and release all intermediates.
    Image<Gray, byte> result = NormalizeToByteImage(retinex);
    floatImage.Dispose();
    logImage.Dispose();
    blurred.Dispose();
    logBlurred.Dispose();
    retinex.Dispose();
    return result;
}
/// <summary>
/// Multi-scale Retinex (MSR): the unweighted average of SSR responses over
/// the given sigmas, MSR = Σ SSR_i / N, then gain/offset and 0-255
/// normalization.
/// </summary>
/// <param name="inputImage">Grayscale source image.</param>
/// <param name="sigmas">Gaussian sigmas, one per scale; must be non-empty.</param>
/// <param name="gain">Multiplicative factor applied to the averaged log-ratio.</param>
/// <param name="offset">Additive offset applied after the gain.</param>
/// <returns>New normalized byte image; caller owns it.</returns>
private Image<Gray, byte> MultiScaleRetinex(Image<Gray, byte> inputImage, double[] sigmas, double gain, int offset)
{
    // Convert to float, +1 to avoid log(0). Emgu operators allocate new
    // images, so intermediates are disposed explicitly (the original
    // leaked the raw conversion and every reassigned accumulator).
    using Image<Gray, float> converted = inputImage.Convert<Gray, float>();
    using Image<Gray, float> floatImage = converted + 1.0f;

    // log(I), shared by all scales.
    using Image<Gray, float> logImage = new Image<Gray, float>(inputImage.Size);
    for (int y = 0; y < inputImage.Height; y++)
    {
        for (int x = 0; x < inputImage.Width; x++)
        {
            logImage.Data[y, x, 0] = (float)Math.Log(floatImage.Data[y, x, 0]);
        }
    }

    // Accumulate the SSR response of every scale.
    Image<Gray, float> msrSum = new Image<Gray, float>(inputImage.Size);
    msrSum.SetZero();
    try
    {
        foreach (double sigma in sigmas)
        {
            using Image<Gray, float> blurred = new Image<Gray, float>(inputImage.Size);
            int kernelSize = (int)(sigma * 6) | 1; // force odd kernel size
            if (kernelSize < 3) kernelSize = 3;
            CvInvoke.GaussianBlur(floatImage, blurred, new System.Drawing.Size(kernelSize, kernelSize), sigma);

            // log(I * G) for this scale.
            using Image<Gray, float> logBlurred = new Image<Gray, float>(inputImage.Size);
            for (int y = 0; y < inputImage.Height; y++)
            {
                for (int x = 0; x < inputImage.Width; x++)
                {
                    logBlurred.Data[y, x, 0] = (float)Math.Log(blurred.Data[y, x, 0]);
                }
            }

            // msrSum += SSR; dispose the superseded accumulator.
            using Image<Gray, float> ssr = logImage - logBlurred;
            Image<Gray, float> updated = msrSum + ssr;
            msrSum.Dispose();
            msrSum = updated;
        }

        // Average across scales, then apply gain and offset.
        using Image<Gray, float> averaged = msrSum / sigmas.Length;
        using Image<Gray, float> gained = averaged * gain;
        using Image<Gray, float> shifted = gained + offset;
        return NormalizeToByteImage(shifted);
    }
    finally
    {
        msrSum.Dispose();
    }
}
/// <summary>
/// Multi-scale Retinex with color restoration (MSRCR). For a grayscale
/// input the color-restoration step degenerates into weighting the MSR
/// output by log(I + 1) / log(128), i.e. a local contrast emphasis.
/// </summary>
/// <param name="inputImage">Grayscale source image.</param>
/// <param name="sigmas">Gaussian sigmas forwarded to MSR.</param>
/// <param name="gain">Gain forwarded to MSR.</param>
/// <param name="offset">Offset forwarded to MSR.</param>
/// <returns>New normalized byte image; caller owns it.</returns>
private Image<Gray, byte> MultiScaleRetinexCR(Image<Gray, byte> inputImage, double[] sigmas, double gain, int offset)
{
    // Run plain MSR first, then weight it pixel-wise.
    using Image<Gray, byte> msr = MultiScaleRetinex(inputImage, sigmas, gain, offset);
    using Image<Gray, float> msrAsFloat = msr.Convert<Gray, float>();
    using Image<Gray, float> sourceAsFloat = inputImage.Convert<Gray, float>();
    using Image<Gray, float> weighted = new Image<Gray, float>(inputImage.Size);

    int rows = inputImage.Height;
    int cols = inputImage.Width;
    for (int row = 0; row < rows; row++)
    {
        for (int col = 0; col < cols; col++)
        {
            float msrValue = msrAsFloat.Data[row, col, 0];
            float sourceValue = sourceAsFloat.Data[row, col, 0];
            // Restoration factor: log(I + 1) / log(128).
            float factor = (float)Math.Log(sourceValue + 1.0) / (float)Math.Log(128.0);
            weighted.Data[row, col, 0] = msrValue * factor;
        }
    }

    return NormalizeToByteImage(weighted);
}
/// <summary>
/// Linearly rescales a float image to the full 0-255 byte range.
/// If the image is constant (max == min) the zero-initialized result is
/// returned unchanged, i.e. all black — same behavior as before.
/// </summary>
/// <param name="floatImage">Source float image; not modified.</param>
/// <returns>New byte image; caller owns it.</returns>
private Image<Gray, byte> NormalizeToByteImage(Image<Gray, float> floatImage)
{
    // Use OpenCV's MinMaxLoc instead of a managed per-pixel scan —
    // consistent with the other processors in this library and faster.
    double minVal = 0, maxVal = 0;
    var minLoc = new System.Drawing.Point();
    var maxLoc = new System.Drawing.Point();
    CvInvoke.MinMaxLoc(floatImage, ref minVal, ref maxVal, ref minLoc, ref maxLoc);

    // Map [min, max] -> [0, 255] with clamping.
    Image<Gray, byte> result = new Image<Gray, byte>(floatImage.Size);
    double range = maxVal - minVal;
    if (range > 0)
    {
        for (int y = 0; y < floatImage.Height; y++)
        {
            for (int x = 0; x < floatImage.Width; x++)
            {
                float val = floatImage.Data[y, x, 0];
                int normalized = (int)((val - minVal) / range * 255.0);
                result.Data[y, x, 0] = (byte)Math.Max(0, Math.Min(255, normalized));
            }
        }
    }
    return result;
}
}
@@ -0,0 +1,141 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: SharpenProcessor.cs
// 描述: 锐化算子,用于增强图像边缘和细节
// 功能:
// - 拉普拉斯锐化
// - 非锐化掩蔽(Unsharp Masking)
// - 可调节锐化强度
// - 支持多种锐化核
// 算法: 拉普拉斯算子、非锐化掩蔽
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Sharpening processor: enhances edges and detail in a grayscale image via
/// Laplacian addition or unsharp masking.
/// </summary>
public class SharpenProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SharpenProcessor>();

    public SharpenProcessor()
    {
        Name = LocalizationHelper.GetString("SharpenProcessor_Name");
        Description = LocalizationHelper.GetString("SharpenProcessor_Description");
    }

    /// <summary>
    /// Registers the tunable parameters: Method (Laplacian / UnsharpMask),
    /// Strength (0.1-5.0) and KernelSize (1-15, used by UnsharpMask).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("SharpenProcessor_Method"),
            typeof(string),
            "Laplacian",
            null,
            null,
            LocalizationHelper.GetString("SharpenProcessor_Method_Desc"),
            new string[] { "Laplacian", "UnsharpMask" }));
        Parameters.Add("Strength", new ProcessorParameter(
            "Strength",
            LocalizationHelper.GetString("SharpenProcessor_Strength"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("SharpenProcessor_Strength_Desc")));
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("SharpenProcessor_KernelSize"),
            typeof(int),
            3,
            1,
            15,
            LocalizationHelper.GetString("SharpenProcessor_KernelSize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the selected sharpening method to a grayscale image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double strength = GetParameter<double>("Strength");
        int kernelSize = GetParameter<int>("KernelSize");
        // Gaussian kernels must be odd-sized.
        if (kernelSize % 2 == 0) kernelSize++;
        Image<Gray, byte> result;
        if (method == "UnsharpMask")
        {
            result = ApplyUnsharpMask(inputImage, kernelSize, strength);
        }
        else // Laplacian
        {
            result = ApplyLaplacianSharpening(inputImage, strength);
        }
        _logger.Debug("Process: Method = {Method}, Strength = {Strength}, KernelSize = {KernelSize}",
            method, strength, kernelSize);
        return result;
    }

    /// <summary>
    /// Laplacian-based sharpening: result = I + strength * Laplacian(I),
    /// saturated back to the byte range.
    /// </summary>
    private Image<Gray, byte> ApplyLaplacianSharpening(Image<Gray, byte> inputImage, double strength)
    {
        // Laplacian response in float to preserve negative values.
        // (Fixed: the original also computed an unused byte copy of the
        // Laplacian and leaked the scaled intermediate.)
        using var laplacian = new Image<Gray, float>(inputImage.Size);
        CvInvoke.Laplacian(inputImage, laplacian, DepthType.Cv32F, 1);
        // NOTE(review): this ADDS the Laplacian response; classic sharpening
        // subtracts it for a positive-center kernel — confirm the intended
        // sign convention before changing behavior.
        using var floatImage = inputImage.Convert<Gray, float>();
        using var scaledLaplacian = laplacian * strength;
        using var sharpened = floatImage + scaledLaplacian;
        // Convert<Gray, byte> saturates to [0, 255].
        return sharpened.Convert<Gray, byte>();
    }

    /// <summary>
    /// Unsharp masking: result = I + strength * (I - GaussianBlur(I)).
    /// </summary>
    private Image<Gray, byte> ApplyUnsharpMask(Image<Gray, byte> inputImage, int kernelSize, double strength)
    {
        // Blurred copy provides the low-frequency component.
        using var blurred = new Image<Gray, byte>(inputImage.Size);
        CvInvoke.GaussianBlur(inputImage, blurred,
            new System.Drawing.Size(kernelSize, kernelSize), 0);
        // Detail = original - blurred, amplified and added back.
        using var floatInput = inputImage.Convert<Gray, float>();
        using var floatBlurred = blurred.Convert<Gray, float>();
        using var detail = floatInput - floatBlurred;
        using var scaledDetail = detail * strength;
        using var sharpened = floatInput + scaledDetail;
        // Convert<Gray, byte> saturates to [0, 255].
        return sharpened.Convert<Gray, byte>();
    }
}
@@ -0,0 +1,127 @@
// ============================================================================
// Copyright © 2016-2025 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: SubPixelZoomProcessor.cs
// 描述: 亚像素放大算子,通过高质量插值实现图像的亚像素级放大
// 功能:
// - 支持任意倍率放大(含小数倍率如 1.5x、2.3x)
// - 多种插值方法(最近邻、双线性、双三次、Lanczos)
// - 可选锐化补偿(抵消插值模糊)
// - 可选指定输出尺寸
// 算法: 基于 OpenCV Resize 的高质量插值放大
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Sub-pixel zoom processor: upscales a grayscale image by an arbitrary
/// (possibly fractional) factor using a selectable interpolation method,
/// with optional unsharp-mask compensation for interpolation blur.
/// </summary>
public class SubPixelZoomProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SubPixelZoomProcessor>();

    public SubPixelZoomProcessor()
    {
        Name = LocalizationHelper.GetString("SubPixelZoomProcessor_Name");
        Description = LocalizationHelper.GetString("SubPixelZoomProcessor_Description");
    }

    /// <summary>
    /// Registers ScaleFactor (1-16x), Interpolation (Nearest / Bilinear /
    /// Bicubic / Lanczos), SharpenAfter and SharpenStrength parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("ScaleFactor", new ProcessorParameter(
            "ScaleFactor",
            LocalizationHelper.GetString("SubPixelZoomProcessor_ScaleFactor"),
            typeof(double),
            2.0,
            1.0,
            16.0,
            LocalizationHelper.GetString("SubPixelZoomProcessor_ScaleFactor_Desc")));
        Parameters.Add("Interpolation", new ProcessorParameter(
            "Interpolation",
            LocalizationHelper.GetString("SubPixelZoomProcessor_Interpolation"),
            typeof(string),
            "Lanczos",
            null,
            null,
            LocalizationHelper.GetString("SubPixelZoomProcessor_Interpolation_Desc"),
            new string[] { "Nearest", "Bilinear", "Bicubic", "Lanczos" }));
        Parameters.Add("SharpenAfter", new ProcessorParameter(
            "SharpenAfter",
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenAfter"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenAfter_Desc")));
        Parameters.Add("SharpenStrength", new ProcessorParameter(
            "SharpenStrength",
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenStrength"),
            typeof(double),
            0.5,
            0.1,
            3.0,
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenStrength_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Upscales the input by ScaleFactor using the chosen interpolation,
    /// then optionally applies in-place unsharp-mask compensation.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double zoom = GetParameter<double>("ScaleFactor");
        string interpolationName = GetParameter<string>("Interpolation");
        bool compensate = GetParameter<bool>("SharpenAfter");
        double compensateStrength = GetParameter<double>("SharpenStrength");

        // Map the parameter string onto the OpenCV interpolation enum;
        // anything unrecognized falls back to Lanczos4.
        Inter resizeMode = interpolationName switch
        {
            "Nearest" => Inter.Nearest,
            "Bilinear" => Inter.Linear,
            "Bicubic" => Inter.Cubic,
            _ => Inter.Lanczos4
        };

        // Target size, never below 1x1.
        int targetWidth = Math.Max(1, (int)Math.Round(inputImage.Width * zoom));
        int targetHeight = Math.Max(1, (int)Math.Round(inputImage.Height * zoom));

        var zoomed = new Image<Gray, byte>(targetWidth, targetHeight);
        CvInvoke.Resize(inputImage, zoomed, new Size(targetWidth, targetHeight), 0, 0, resizeMode);

        if (compensate)
        {
            // Unsharp mask in place: pixel + strength * (pixel - blurred).
            int blurKernel = Math.Max(3, (int)(zoom * 2) | 1); // odd kernel size
            using var smoothed = zoomed.SmoothGaussian(blurKernel);
            for (int row = 0; row < targetHeight; row++)
            {
                for (int col = 0; col < targetWidth; col++)
                {
                    float original = zoomed.Data[row, col, 0];
                    float smooth = smoothed.Data[row, col, 0];
                    float boosted = original + (float)(compensateStrength * (original - smooth));
                    zoomed.Data[row, col, 0] = (byte)Math.Clamp((int)boosted, 0, 255);
                }
            }
        }

        _logger.Debug("Process: Scale={Scale}, Interp={Interp}, Size={W}x{H}, Sharpen={Sharpen}",
            zoom, interpolationName, targetWidth, targetHeight, compensate);
        return zoomed;
    }
}
@@ -0,0 +1,319 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: SuperResolutionProcessor.cs
// 描述: 基于深度学习的超分辨率算子
// 功能:
// - 支持 EDSR 和 FSRCNN 超分辨率模型(ONNX 格式)
// - 支持 2x、3x、4x 放大倍率
// - 灰度图像自动转换为三通道输入,推理后转回灰度
// - 模型文件自动搜索,支持自定义路径
// - 使用 Microsoft.ML.OnnxRuntime 进行推理
// 算法: EDSR (Enhanced Deep Residual SR) / FSRCNN (Fast SR CNN)
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;
using Serilog;
using System.IO;
namespace ImageProcessing.Processors;
/// <summary>
/// Deep-learning super-resolution processor (EDSR / FSRCNN) using ONNX
/// Runtime inference. Grayscale input is fed as 1 or 3 channels depending
/// on the model's expected input layout; output is converted back to gray.
/// </summary>
public class SuperResolutionProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SuperResolutionProcessor>();

    // Session cache to avoid reloading the model on every call.
    // NOTE(review): this static cache is not synchronized — confirm that
    // processors are invoked from a single thread before relying on it
    // concurrently.
    private static InferenceSession? _cachedSession;
    private static string _cachedModelKey = string.Empty;

    public SuperResolutionProcessor()
    {
        Name = LocalizationHelper.GetString("SuperResolutionProcessor_Name");
        Description = LocalizationHelper.GetString("SuperResolutionProcessor_Description");
    }

    /// <summary>
    /// Registers the Model (EDSR / FSRCNN) and Scale (2 / 3 / 4) parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Model", new ProcessorParameter(
            "Model",
            LocalizationHelper.GetString("SuperResolutionProcessor_Model"),
            typeof(string),
            "FSRCNN",
            null,
            null,
            LocalizationHelper.GetString("SuperResolutionProcessor_Model_Desc"),
            new string[] { "EDSR", "FSRCNN" }));
        Parameters.Add("Scale", new ProcessorParameter(
            "Scale",
            LocalizationHelper.GetString("SuperResolutionProcessor_Scale"),
            typeof(string),
            "2",
            null,
            null,
            LocalizationHelper.GetString("SuperResolutionProcessor_Scale_Desc"),
            new string[] { "2", "3", "4" }));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Runs super-resolution inference on the input image. Large EDSR
    /// inputs are processed tile-by-tile to bound memory and latency.
    /// </summary>
    /// <exception cref="FileNotFoundException">
    /// Thrown when the ONNX model file cannot be found in any search path.
    /// </exception>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string model = GetParameter<string>("Model");
        int scale = int.Parse(GetParameter<string>("Scale"));

        // Locate the model file before doing any work.
        string modelPath = FindModelFile(model, scale);
        if (string.IsNullOrEmpty(modelPath))
        {
            _logger.Error("Model file not found: {Model}_x{Scale}.onnx", model, scale);
            throw new FileNotFoundException(
                $"超分辨率模型文件未找到: {model}_x{scale}.onnx\n" +
                $"请将模型文件放置到以下任一目录:\n" +
                $" 1. 程序目录/Models/\n" +
                $" 2. 程序目录/\n" +
                $"模型需要 ONNX 格式。\n" +
                $"可使用 tf2onnx 从 .pb 转换:\n" +
                $" pip install tf2onnx\n" +
                $" python -m tf2onnx.convert --input {model}_x{scale}.pb --output {model}_x{scale}.onnx --inputs input:0 --outputs output:0");
        }

        // Load the session, or reuse the cached one for the same model/scale.
        string modelKey = $"{model}_{scale}";
        InferenceSession session;
        if (_cachedModelKey == modelKey && _cachedSession != null)
        {
            session = _cachedSession;
            _logger.Debug("Reusing cached session: {ModelKey}", modelKey);
        }
        else
        {
            _cachedSession?.Dispose();
            var options = new SessionOptions();
            options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_ALL;
            try
            {
                options.AppendExecutionProvider_CUDA(0);
                _logger.Information("Using CUDA GPU for inference");
            }
            catch
            {
                // CUDA provider unavailable; ONNX Runtime uses the CPU provider.
                _logger.Warning("CUDA not available, falling back to CPU");
            }
            session = new InferenceSession(modelPath, options);
            _cachedSession = session;
            _cachedModelKey = modelKey;
            // (Fixed: the original logged session.GetType().Name under a
            // "Providers" label, which was misleading, and left an unused
            // ModelMetadata local behind.)
            _logger.Information("Loaded ONNX model: {ModelPath}", modelPath);
        }

        int h = inputImage.Height;
        int w = inputImage.Width;
        _logger.Information("Input image size: {W}x{H}, Model: {Model}, Scale: {Scale}", w, h, model, scale);

        // Tile large EDSR inputs to avoid very slow / out-of-memory
        // single-shot inference.
        const int TileSize = 256;
        bool useTiling = model.StartsWith("EDSR", StringComparison.OrdinalIgnoreCase) && (h > TileSize || w > TileSize);
        return useTiling
            ? ProcessTiled(session, inputImage, scale, TileSize)
            : ProcessSingle(session, inputImage, scale);
    }

    /// <summary>
    /// Single-shot inference (small images, or FSRCNN).
    /// Input layout is NHWC [1, H, W, C]; output layout is NCHW
    /// [1, C, H*scale, W*scale] — as established by the indexing below.
    /// </summary>
    private Image<Gray, byte> ProcessSingle(InferenceSession session, Image<Gray, byte> inputImage, int scale)
    {
        int h = inputImage.Height;
        int w = inputImage.Width;

        // Inspect the model's declared input to pick 1- or 3-channel feeding.
        string inputName = session.InputMetadata.Keys.First();
        var inputMeta = session.InputMetadata[inputName];
        int[] dims = inputMeta.Dimensions;
        int inputChannels = dims[^1]; // last NHWC dimension is the channel count

        DenseTensor<float> inputTensor;
        if (inputChannels == 1)
        {
            // FSRCNN: single-channel grayscale input.
            // Fill a flat buffer in parallel and wrap it once.
            // (Fixed: the original allocated a tensor, copied its backing
            // buffer with ToArray(), then allocated a second tensor from
            // the copy — two wasted allocations per call.)
            var buf = new float[h * w];
            var imgData = inputImage.Data;
            Parallel.For(0, h, y =>
            {
                int rowOffset = y * w;
                for (int x = 0; x < w; x++)
                    buf[rowOffset + x] = imgData[y, x, 0];
            });
            inputTensor = new DenseTensor<float>(buf, new[] { 1, h, w, 1 });
        }
        else
        {
            // EDSR: three-channel BGR input replicated from the gray image.
            using var colorInput = new Image<Bgr, byte>(w, h);
            CvInvoke.CvtColor(inputImage, colorInput, ColorConversion.Gray2Bgr);
            var buf = new float[h * w * 3];
            var imgData = colorInput.Data;
            Parallel.For(0, h, y =>
            {
                int rowOffset = y * w * 3;
                for (int x = 0; x < w; x++)
                {
                    int px = rowOffset + x * 3;
                    buf[px] = imgData[y, x, 0];
                    buf[px + 1] = imgData[y, x, 1];
                    buf[px + 2] = imgData[y, x, 2];
                }
            });
            inputTensor = new DenseTensor<float>(buf, new[] { 1, h, w, 3 });
        }

        // Run inference.
        var inputs = new List<NamedOnnxValue>
        {
            NamedOnnxValue.CreateFromTensor(inputName, inputTensor)
        };
        using var results = session.Run(inputs);
        var outputTensor = results.First().AsTensor<float>();
        // Output shape: [1, C, H*scale, W*scale] (NCHW).
        var shape = outputTensor.Dimensions;
        int outC = shape[1];
        int outH = shape[2];
        int outW = shape[3];

        // Convert the tensor back to a grayscale byte image.
        Image<Gray, byte> result;
        if (outC == 1)
        {
            // FSRCNN: single-channel output [1, 1, outH, outW].
            result = new Image<Gray, byte>(outW, outH);
            var outData = result.Data;
            Parallel.For(0, outH, y =>
            {
                for (int x = 0; x < outW; x++)
                    outData[y, x, 0] = (byte)Math.Clamp((int)outputTensor[0, 0, y, x], 0, 255);
            });
        }
        else
        {
            // EDSR: three-channel output [1, 3, outH, outW] -> gray,
            // computed directly to skip an intermediate BGR image.
            result = new Image<Gray, byte>(outW, outH);
            var outData = result.Data;
            Parallel.For(0, outH, y =>
            {
                for (int x = 0; x < outW; x++)
                {
                    float b = outputTensor[0, 0, y, x];
                    float g = outputTensor[0, 1, y, x];
                    float r = outputTensor[0, 2, y, x];
                    // BT.601 luma: 0.299*R + 0.587*G + 0.114*B
                    int gray = (int)(0.299f * r + 0.587f * g + 0.114f * b);
                    outData[y, x, 0] = (byte)Math.Clamp(gray, 0, 255);
                }
            });
        }
        _logger.Debug("ProcessSingle: Scale={Scale}, Output={W}x{H}", scale, outW, outH);
        return result;
    }

    /// <summary>
    /// Tiled inference for large EDSR inputs: the image is split into
    /// overlapping tiles, each tile is super-resolved independently, and
    /// the results are stitched into the output (later tiles overwrite the
    /// overlap region of earlier ones).
    /// </summary>
    private Image<Gray, byte> ProcessTiled(InferenceSession session, Image<Gray, byte> inputImage, int scale, int tileSize)
    {
        int h = inputImage.Height;
        int w = inputImage.Width;
        int overlap = 8; // overlap in source pixels to reduce seam artifacts
        var result = new Image<Gray, byte>(w * scale, h * scale);
        int tilesX = (int)Math.Ceiling((double)w / (tileSize - overlap));
        int tilesY = (int)Math.Ceiling((double)h / (tileSize - overlap));
        _logger.Information("Tiled processing: {TilesX}x{TilesY} tiles, tileSize={TileSize}", tilesX, tilesY, tileSize);
        for (int ty = 0; ty < tilesY; ty++)
        {
            for (int tx = 0; tx < tilesX; tx++)
            {
                // Tile origin, clamped so the tile stays inside the image.
                int srcX = Math.Min(tx * (tileSize - overlap), w - tileSize);
                int srcY = Math.Min(ty * (tileSize - overlap), h - tileSize);
                srcX = Math.Max(srcX, 0);
                srcY = Math.Max(srcY, 0);
                int tw = Math.Min(tileSize, w - srcX);
                int th = Math.Min(tileSize, h - srcY);
                // Crop the tile via a temporary ROI, then restore the ROI.
                inputImage.ROI = new System.Drawing.Rectangle(srcX, srcY, tw, th);
                var tile = inputImage.Copy();
                inputImage.ROI = System.Drawing.Rectangle.Empty;
                // Super-resolve the tile.
                var srTile = ProcessSingle(session, tile, scale);
                tile.Dispose();
                // Paste the upscaled tile into the scaled destination rect.
                int dstX = srcX * scale;
                int dstY = srcY * scale;
                result.ROI = new System.Drawing.Rectangle(dstX, dstY, srTile.Width, srTile.Height);
                srTile.CopyTo(result);
                result.ROI = System.Drawing.Rectangle.Empty;
                srTile.Dispose();
            }
        }
        _logger.Debug("ProcessTiled: Scale={Scale}, Output={W}x{H}", scale, result.Width, result.Height);
        return result;
    }

    /// <summary>
    /// Searches for "{model}_x{scale}.onnx" in the application base directory
    /// and the current working directory (each with and without a "Models"
    /// subfolder). Returns the first existing path, or an empty string.
    /// </summary>
    private static string FindModelFile(string model, int scale)
    {
        string baseDir = AppDomain.CurrentDomain.BaseDirectory;
        string fileName = $"{model}_x{scale}.onnx";
        string[] searchPaths = new[]
        {
            Path.Combine(baseDir, "Models", fileName),
            Path.Combine(baseDir, fileName),
            Path.Combine(Directory.GetCurrentDirectory(), "Models", fileName),
            Path.Combine(Directory.GetCurrentDirectory(), fileName),
        };
        foreach (var path in searchPaths)
        {
            if (File.Exists(path))
            {
                _logger.Debug("Found model file: {Path}", path);
                return path;
            }
        }
        _logger.Warning("Model file not found: {Model}_x{Scale}.onnx", model, scale);
        return string.Empty;
    }
}
@@ -0,0 +1,102 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: MorphologyProcessor.cs
// 描述: 形态学处理算子,用于二值图像的形态学操作
// 功能:
// - 腐蚀(Erode):收缩目标区域
// - 膨胀(Dilate):扩张目标区域
// - 开运算(Open):先腐蚀后膨胀,去除小目标
// - 闭运算(Close):先膨胀后腐蚀,填充小孔洞
// 算法: 数学形态学运算
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Morphology processor: erosion, dilation, opening and closing with a
/// rectangular structuring element.
/// </summary>
public class MorphologyProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MorphologyProcessor>();

    public MorphologyProcessor()
    {
        Name = LocalizationHelper.GetString("MorphologyProcessor_Name");
        Description = LocalizationHelper.GetString("MorphologyProcessor_Description");
    }

    /// <summary>
    /// Registers Operation (Erode / Dilate / Open / Close), KernelSize
    /// (1-21) and Iterations (1-10) parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Operation", new ProcessorParameter(
            "Operation",
            LocalizationHelper.GetString("MorphologyProcessor_Operation"),
            typeof(string),
            "Erode",
            null,
            null,
            LocalizationHelper.GetString("MorphologyProcessor_Operation_Desc"),
            new string[] { "Erode", "Dilate", "Open", "Close" }));
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("MorphologyProcessor_KernelSize"),
            typeof(int),
            3,
            1,
            21,
            LocalizationHelper.GetString("MorphologyProcessor_KernelSize_Desc")));
        Parameters.Add("Iterations", new ProcessorParameter(
            "Iterations",
            LocalizationHelper.GetString("MorphologyProcessor_Iterations"),
            typeof(int),
            1,
            1,
            10,
            LocalizationHelper.GetString("MorphologyProcessor_Iterations_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the selected morphological operation; an unrecognized
    /// operation returns an unmodified copy of the input.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string operation = GetParameter<string>("Operation");
        int kernelSize = GetParameter<int>("KernelSize");
        int iterations = GetParameter<int>("Iterations");
        // GetStructuringElement returns a Mat that must be disposed;
        // the original leaked it on every call.
        using var kernel = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(kernelSize, kernelSize), new Point(-1, -1));
        // Clone so an unmatched operation falls through to a pass-through copy.
        var result = inputImage.Clone();
        switch (operation)
        {
            case "Erode":
                CvInvoke.Erode(inputImage, result, kernel, new Point(-1, -1),
                    iterations, BorderType.Default, default);
                break;
            case "Dilate":
                CvInvoke.Dilate(inputImage, result, kernel, new Point(-1, -1),
                    iterations, BorderType.Default, default);
                break;
            case "Open":
                CvInvoke.MorphologyEx(inputImage, result, MorphOp.Open, kernel,
                    new Point(-1, -1), iterations, BorderType.Default, default);
                break;
            case "Close":
                CvInvoke.MorphologyEx(inputImage, result, MorphOp.Close, kernel,
                    new Point(-1, -1), iterations, BorderType.Default, default);
                break;
        }
        _logger.Debug("Process:Operation = {operation},KernelSize = {kernelSize},Iterations = {iterations}", operation, kernelSize, iterations);
        return result;
    }
}
@@ -0,0 +1,128 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: DifferenceProcessor.cs
// 描述: 差分运算算子,用于边缘检测和变化检测
// 功能:
// - 对图像进行差分运算
// - 支持水平、垂直和对角线差分
// - 可用于边缘检测
// - 可选归一化输出
// 算法: 像素级差分运算
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Difference processor: horizontal, vertical, or combined gradient-magnitude
/// differencing for edge / change detection, with optional 0-255
/// normalization of the result.
/// </summary>
public class DifferenceProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<DifferenceProcessor>();

    public DifferenceProcessor()
    {
        Name = LocalizationHelper.GetString("DifferenceProcessor_Name");
        Description = LocalizationHelper.GetString("DifferenceProcessor_Description");
    }

    /// <summary>
    /// Registers Direction (Horizontal / Vertical / Both) and Normalize
    /// parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Direction", new ProcessorParameter(
            "Direction",
            LocalizationHelper.GetString("DifferenceProcessor_Direction"),
            typeof(string),
            "Horizontal",
            null,
            null,
            LocalizationHelper.GetString("DifferenceProcessor_Direction_Desc"),
            new string[] { "Horizontal", "Vertical", "Both" }));
        Parameters.Add("Normalize", new ProcessorParameter(
            "Normalize",
            LocalizationHelper.GetString("DifferenceProcessor_Normalize"),
            typeof(bool),
            true,
            null,
            null,
            LocalizationHelper.GetString("DifferenceProcessor_Normalize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Computes the forward difference in the configured direction.
    /// Border rows/columns without a forward neighbor are left at zero.
    /// (Fixed: the original leaked the pre-normalization float image, the
    /// subtraction intermediate, and the final float result.)
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string direction = GetParameter<string>("Direction");
        bool normalize = GetParameter<bool>("Normalize");
        int width = inputImage.Width;
        int height = inputImage.Height;
        using var floatImage = inputImage.Convert<Gray, float>();
        var result = new Image<Gray, float>(width, height);
        try
        {
            if (direction == "Horizontal")
            {
                // Horizontal difference: I(x+1,y) - I(x,y)
                for (int y = 0; y < height; y++)
                {
                    for (int x = 0; x < width - 1; x++)
                    {
                        result.Data[y, x, 0] = floatImage.Data[y, x + 1, 0] - floatImage.Data[y, x, 0];
                    }
                    result.Data[y, width - 1, 0] = 0;
                }
            }
            else if (direction == "Vertical")
            {
                // Vertical difference: I(x,y+1) - I(x,y)
                for (int y = 0; y < height - 1; y++)
                {
                    for (int x = 0; x < width; x++)
                    {
                        result.Data[y, x, 0] = floatImage.Data[y + 1, x, 0] - floatImage.Data[y, x, 0];
                    }
                }
                for (int x = 0; x < width; x++)
                {
                    result.Data[height - 1, x, 0] = 0;
                }
            }
            else // Both
            {
                // Gradient magnitude: sqrt(dx^2 + dy^2); the last row and
                // column stay at their zero-initialized values.
                for (int y = 0; y < height - 1; y++)
                {
                    for (int x = 0; x < width - 1; x++)
                    {
                        float dx = floatImage.Data[y, x + 1, 0] - floatImage.Data[y, x, 0];
                        float dy = floatImage.Data[y + 1, x, 0] - floatImage.Data[y, x, 0];
                        result.Data[y, x, 0] = (float)Math.Sqrt(dx * dx + dy * dy);
                    }
                }
            }
            if (normalize)
            {
                double minVal = 0, maxVal = 0;
                Point minLoc = new Point();
                Point maxLoc = new Point();
                CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
                if (maxVal > minVal)
                {
                    // Rescale to [0, 255], disposing the superseded images.
                    using var shifted = result - minVal;
                    var rescaled = shifted * (255.0 / (maxVal - minVal));
                    result.Dispose();
                    result = rescaled;
                }
            }
            _logger.Debug("Process: Direction = {Direction}, Normalize = {Normalize}", direction, normalize);
            return result.Convert<Gray, byte>();
        }
        finally
        {
            result.Dispose();
        }
    }
}
@@ -0,0 +1,90 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: DivisionProcessor.cs
// 描述: 除法运算算子,用于图像归一化处理
// 功能:
// - 对图像像素值进行除法运算
// - 支持缩放因子调整
// - 可选归一化到0-255范围
// - 常用于背景校正和图像归一化
// 算法: 像素级除法运算
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Division processor: divides pixel values by a constant divisor and
/// multiplies by a scale factor, with optional 0-255 normalization.
/// Commonly used for background correction and image normalization.
/// </summary>
public class DivisionProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<DivisionProcessor>();

    public DivisionProcessor()
    {
        Name = LocalizationHelper.GetString("DivisionProcessor_Name");
        Description = LocalizationHelper.GetString("DivisionProcessor_Description");
    }

    /// <summary>
    /// Registers Divisor (0.01-255), Scale (0.1-10) and Normalize parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Divisor", new ProcessorParameter(
            "Divisor",
            LocalizationHelper.GetString("DivisionProcessor_Divisor"),
            typeof(double),
            2.0,
            0.01,
            255.0,
            LocalizationHelper.GetString("DivisionProcessor_Divisor_Desc")));
        Parameters.Add("Scale", new ProcessorParameter(
            "Scale",
            LocalizationHelper.GetString("DivisionProcessor_Scale"),
            typeof(double),
            1.0,
            0.1,
            10.0,
            LocalizationHelper.GetString("DivisionProcessor_Scale_Desc")));
        Parameters.Add("Normalize", new ProcessorParameter(
            "Normalize",
            LocalizationHelper.GetString("DivisionProcessor_Normalize"),
            typeof(bool),
            true,
            null,
            null,
            LocalizationHelper.GetString("DivisionProcessor_Normalize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Computes (I / Divisor) * Scale, optionally rescaled to [0, 255].
    /// Without normalization, values above 255 saturate on the byte
    /// conversion. (Fixed: the original never disposed the float
    /// conversion or the arithmetic intermediates.)
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double divisor = GetParameter<double>("Divisor");
        double scale = GetParameter<double>("Scale");
        bool normalize = GetParameter<bool>("Normalize");
        using var floatImage = inputImage.Convert<Gray, float>();
        using var quotient = floatImage / divisor;
        var result = quotient * scale;
        try
        {
            if (normalize)
            {
                double minVal = 0, maxVal = 0;
                Point minLoc = new Point();
                Point maxLoc = new Point();
                CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
                if (maxVal > minVal)
                {
                    // Rescale to [0, 255], disposing the superseded images.
                    using var shifted = result - minVal;
                    var rescaled = shifted * (255.0 / (maxVal - minVal));
                    result.Dispose();
                    result = rescaled;
                }
            }
            _logger.Debug("Process:Divisor = {0}, Scale = {1}, Normalize = {2}", divisor, scale, normalize);
            return result.Convert<Gray, byte>();
        }
        finally
        {
            result.Dispose();
        }
    }
}
@@ -0,0 +1,95 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: IntegralProcessor.cs
// 描述: 积分运算算子,计算积分图像
// 功能:
// - 计算积分图像(累加和)
// - 用于快速区域求和
// - 支持归一化输出
// 算法: 积分图像算法
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Integral-image processor: computes the summed-area table of the input,
/// optionally normalized to the 0-255 range for visualization.
/// </summary>
public class IntegralProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<IntegralProcessor>();

    public IntegralProcessor()
    {
        Name = LocalizationHelper.GetString("IntegralProcessor_Name");
        Description = LocalizationHelper.GetString("IntegralProcessor_Description");
    }

    /// <summary>
    /// Registers the Normalize parameter.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Normalize", new ProcessorParameter(
            "Normalize",
            LocalizationHelper.GetString("IntegralProcessor_Normalize"),
            typeof(bool),
            true,
            null,
            null,
            LocalizationHelper.GetString("IntegralProcessor_Normalize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Builds the integral image with the standard single-pass recurrence
    /// S(x,y) = I(x,y) + S(x-1,y) + S(x,y-1) - S(x-1,y-1).
    /// (Fixed: the original leaked the pre-normalization float image, the
    /// subtraction intermediate, and the final float result.)
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        bool normalize = GetParameter<bool>("Normalize");
        int width = inputImage.Width;
        int height = inputImage.Height;
        // Accumulate in double to avoid overflow (sums can reach 255 * W * H).
        using var integralImage = new Image<Gray, double>(width, height);
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                double sum = inputImage.Data[y, x, 0];
                if (x > 0)
                    sum += integralImage.Data[y, x - 1, 0];
                if (y > 0)
                    sum += integralImage.Data[y - 1, x, 0];
                if (x > 0 && y > 0)
                    sum -= integralImage.Data[y - 1, x - 1, 0];
                integralImage.Data[y, x, 0] = sum;
            }
        }
        var result = integralImage.Convert<Gray, float>();
        try
        {
            if (normalize)
            {
                double minVal = 0, maxVal = 0;
                Point minLoc = new Point();
                Point maxLoc = new Point();
                CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
                if (maxVal > minVal)
                {
                    // Rescale to [0, 255], disposing the superseded images.
                    using var shifted = result - minVal;
                    var rescaled = shifted * (255.0 / (maxVal - minVal));
                    result.Dispose();
                    result = rescaled;
                }
            }
            _logger.Debug("Process: Normalize = {Normalize}", normalize);
            return result.Convert<Gray, byte>();
        }
        finally
        {
            result.Dispose();
        }
    }
}
@@ -0,0 +1,88 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: MultiplicationProcessor.cs
// 描述: 乘法运算算子,用于图像增强
// 功能:
// - 对图像像素值进行乘法运算
// - 支持增益调整
// - 可选归一化输出
// - 常用于图像增强和对比度调整
// 算法: 像素级乘法运算
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Multiplication processor: multiplies pixel values by a constant gain,
/// with optional 0-255 normalization. Used for brightness / contrast
/// enhancement.
/// </summary>
public class MultiplicationProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MultiplicationProcessor>();

    public MultiplicationProcessor()
    {
        Name = LocalizationHelper.GetString("MultiplicationProcessor_Name");
        Description = LocalizationHelper.GetString("MultiplicationProcessor_Description");
    }

    /// <summary>
    /// Registers Multiplier (0.1-10) and Normalize parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Multiplier", new ProcessorParameter(
            "Multiplier",
            LocalizationHelper.GetString("MultiplicationProcessor_Multiplier"),
            typeof(double),
            2.0,
            0.1,
            10.0,
            LocalizationHelper.GetString("MultiplicationProcessor_Multiplier_Desc")));
        Parameters.Add("Normalize", new ProcessorParameter(
            "Normalize",
            LocalizationHelper.GetString("MultiplicationProcessor_Normalize"),
            typeof(bool),
            true,
            null,
            null,
            LocalizationHelper.GetString("MultiplicationProcessor_Normalize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Computes I * Multiplier. With Normalize the result is rescaled to
    /// [0, 255]; without it, values above 255 saturate to 255 on the byte
    /// conversion.
    /// (Bug fix: the original "truncate" branch called
    /// ThresholdBinary(255, 255), which sets every pixel &lt;= 255 to ZERO
    /// and only pixels above 255 to 255 — the opposite of the intent.
    /// Convert&lt;Gray, byte&gt; already saturates, so no explicit clipping
    /// is needed. Also fixes leaked float intermediates.)
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double multiplier = GetParameter<double>("Multiplier");
        bool normalize = GetParameter<bool>("Normalize");
        using var floatImage = inputImage.Convert<Gray, float>();
        var result = floatImage * multiplier;
        try
        {
            if (normalize)
            {
                double minVal = 0, maxVal = 0;
                Point minLoc = new Point();
                Point maxLoc = new Point();
                CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
                if (maxVal > minVal)
                {
                    // Rescale to [0, 255], disposing the superseded images.
                    using var shifted = result - minVal;
                    var rescaled = shifted * (255.0 / (maxVal - minVal));
                    result.Dispose();
                    result = rescaled;
                }
            }
            _logger.Debug("Process: Multiplier = {Multiplier}, Normalize = {Normalize}", multiplier, normalize);
            // Saturating conversion clamps the un-normalized case to 0-255.
            return result.Convert<Gray, byte>();
        }
        finally
        {
            result.Dispose();
        }
    }
}
@@ -0,0 +1,65 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: OrProcessor.cs
// 描述: 或运算算子,用于图像逻辑运算
// 功能:
// - 对图像进行按位或运算
// - 支持与固定值或运算
// - 可用于图像合并和掩码操作
// 算法: 像素级按位或运算
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Bitwise-OR operator: ORs every pixel with a fixed value.
/// Usable for image merging and mask operations.
/// </summary>
public class OrProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<OrProcessor>();

    public OrProcessor()
    {
        Name = LocalizationHelper.GetString("OrProcessor_Name");
        Description = LocalizationHelper.GetString("OrProcessor_Description");
    }

    /// <summary>Registers the constant OR operand ("Value", 0–255, default 0).</summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Value", new ProcessorParameter(
            "Value",
            LocalizationHelper.GetString("OrProcessor_Value"),
            typeof(int),
            0,
            0,
            255,
            LocalizationHelper.GetString("OrProcessor_Value_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Returns a copy of the input with every pixel bitwise-ORed with the configured value.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input image.</param>
    /// <returns>New 8-bit grayscale image (a plain clone when Value is 0).</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int value = GetParameter<int>("Value");
        var result = inputImage.Clone();
        // OR with 0 is the identity (and 0 is the default), so the per-pixel
        // pass can be skipped entirely in that case.
        if (value != 0)
        {
            int height = inputImage.Height;
            int width = inputImage.Width;
            // Read the pixel buffers once instead of going through the
            // .Data property accessor on every iteration.
            byte[,,] src = inputImage.Data;
            byte[,,] dst = result.Data;
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    dst[y, x, 0] = (byte)(src[y, x, 0] | value);
                }
            }
        }
        _logger.Debug("Process: Value = {Value}", value);
        return result;
    }
}
@@ -0,0 +1,87 @@
// ============================================================================
// 文件名: AngleMeasurementProcessor.cs
// 描述: 角度测量算子 — 共端点的两条直线夹角
// 功能:
// - 用户定义三个点:端点(顶点)、射线1终点、射线2终点
// - 计算两条射线之间的夹角(0°~180°)
// - 在图像上绘制两条射线、角度弧线和标注
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Angle measurement operator: computes the included angle (0°–180°) between
/// two rays that share a common vertex, and publishes arc-drawing data.
/// </summary>
public class AngleMeasurementProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<AngleMeasurementProcessor>();

    public AngleMeasurementProcessor()
    {
        Name = LocalizationHelper.GetString("AngleMeasurementProcessor_Name");
        Description = LocalizationHelper.GetString("AngleMeasurementProcessor_Description");
    }

    /// <summary>
    /// Registers the three point coordinates (vertex V, ray endpoints A and B).
    /// They are injected by the interactive control and kept as doubles to avoid
    /// rounding error; none of them are shown in the parameter UI.
    /// </summary>
    protected override void InitializeParameters()
    {
        var pointDefaults = new (string Key, double Value)[]
        {
            ("VX", 250.0), ("VY", 250.0),
            ("AX", 100.0), ("AY", 250.0),
            ("BX", 250.0), ("BY", 100.0),
        };
        foreach (var (key, defaultValue) in pointDefaults)
        {
            Parameters.Add(key, new ProcessorParameter(key, key, typeof(double), defaultValue, null, null, "") { IsVisible = false });
        }
    }

    /// <summary>
    /// Computes the angle between rays V→A and V→B and publishes the result,
    /// the three points, and arc start/sweep angles via OutputData.
    /// The input image is returned unchanged (as a clone).
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double vertexX = GetParameter<double>("VX"), vertexY = GetParameter<double>("VY");
        double endAx = GetParameter<double>("AX"), endAy = GetParameter<double>("AY");
        double endBx = GetParameter<double>("BX"), endBy = GetParameter<double>("BY");
        OutputData.Clear();

        // Ray vectors from the vertex toward A and B.
        double rayAx = endAx - vertexX, rayAy = endAy - vertexY;
        double rayBx = endBx - vertexX, rayBy = endBy - vertexY;
        double magA = Math.Sqrt(rayAx * rayAx + rayAy * rayAy);
        double magB = Math.Sqrt(rayBx * rayBx + rayBy * rayBy);

        // Included angle from the dot product; clamped cosine guards against
        // floating-point drift just outside [-1, 1]. Degenerate rays yield 0.
        double angleDeg = 0;
        if (magA > 0.001 && magB > 0.001)
        {
            double dot = rayAx * rayBx + rayAy * rayBy;
            double cosAngle = Math.Clamp(dot / (magA * magB), -1.0, 1.0);
            angleDeg = Math.Acos(cosAngle) * 180.0 / Math.PI;
        }

        // Arc start angle and sweep for rendering; sweep is normalized so the
        // drawn arc always covers the smaller included angle.
        double startDeg = Math.Atan2(rayAy, rayAx) * 180.0 / Math.PI;
        double endDeg = Math.Atan2(rayBy, rayBx) * 180.0 / Math.PI;
        double sweepDeg = endDeg - startDeg;
        if (sweepDeg > 180) sweepDeg -= 360;
        if (sweepDeg < -180) sweepDeg += 360;

        string angleText = $"{angleDeg:F2} deg";
        OutputData["AngleMeasurementResult"] = true;
        OutputData["Vertex"] = new Point((int)Math.Round(vertexX), (int)Math.Round(vertexY));
        OutputData["PointA"] = new Point((int)Math.Round(endAx), (int)Math.Round(endAy));
        OutputData["PointB"] = new Point((int)Math.Round(endBx), (int)Math.Round(endBy));
        OutputData["AngleDeg"] = angleDeg;
        OutputData["ArcStartAngle"] = startDeg;
        OutputData["ArcSweepAngle"] = sweepDeg;
        OutputData["AngleText"] = angleText;

        _logger.Information("AngleMeasurement: Angle={Angle}, V=({VX},{VY}), A=({AX},{AY}), B=({BX},{BY})",
            angleText, vertexX, vertexY, endAx, endAy, endBx, endBy);
        return inputImage.Clone();
    }
}
@@ -0,0 +1,403 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: BgaVoidRateProcessor.cs
// 描述: BGA 空洞率检测算子(两步自动检测法)
//
// 处理流程:
// 第一步 — 焊球定位: 高斯模糊 → Otsu反向二值化 → 闭运算 → 轮廓检测 → 圆度过滤 → 椭圆拟合
// 第二步 — 气泡检测: 焊球轮廓掩码 → 双阈值分割 → 轮廓检测 → 面积过滤 → 气泡率计算
//
// 支持多边形ROI限定检测区域,支持IPC-7095标准PASS/FAIL判定
// 正片模式:焊球=暗区域,气泡=亮区域
//
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// BGA void-rate inspection operator (two-step automatic detection).
/// Step 1 locates solder balls (blur → Otsu inverse threshold → closing →
/// contour/circularity filtering → ellipse fit); step 2 segments voids inside
/// each ball with a dual threshold and computes per-ball and overall void rates
/// with an IPC-style PASS/FAIL classification. Positive-image convention:
/// balls are dark, voids are bright.
/// </summary>
public class BgaVoidRateProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<BgaVoidRateProcessor>();

    public BgaVoidRateProcessor()
    {
        Name = LocalizationHelper.GetString("BgaVoidRateProcessor_Name");
        Description = LocalizationHelper.GetString("BgaVoidRateProcessor_Description");
    }

    /// <summary>
    /// Registers all parameters: optional polygon ROI, step-1 ball localization
    /// (area range, blur size, circularity) and step-2 void detection
    /// (dual threshold, minimum void area, PASS/FAIL limit, draw thickness).
    /// </summary>
    protected override void InitializeParameters()
    {
        // ── ROI restriction ──
        Parameters.Add("RoiMode", new ProcessorParameter(
            "RoiMode",
            LocalizationHelper.GetString("BgaVoidRateProcessor_RoiMode"),
            typeof(string), "None", null, null,
            LocalizationHelper.GetString("BgaVoidRateProcessor_RoiMode_Desc"),
            new string[] { "None", "Polygon" }));
        // Polygon ROI vertex count and coordinates (injected by the UI, hidden, up to 32 points).
        Parameters.Add("PolyCount", new ProcessorParameter("PolyCount", "PolyCount", typeof(int), 0, null, null, "") { IsVisible = false });
        for (int i = 0; i < 32; i++)
        {
            Parameters.Add($"PolyX{i}", new ProcessorParameter($"PolyX{i}", $"PolyX{i}", typeof(int), 0, null, null, "") { IsVisible = false });
            Parameters.Add($"PolyY{i}", new ProcessorParameter($"PolyY{i}", $"PolyY{i}", typeof(int), 0, null, null, "") { IsVisible = false });
        }
        // ── Step 1: BGA ball localization ──
        Parameters.Add("BgaMinArea", new ProcessorParameter(
            "BgaMinArea",
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaMinArea"),
            typeof(int), 500, 10, 1000000,
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaMinArea_Desc")));
        Parameters.Add("BgaMaxArea", new ProcessorParameter(
            "BgaMaxArea",
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaMaxArea"),
            typeof(int), 500000, 100, 10000000,
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaMaxArea_Desc")));
        Parameters.Add("BgaBlurSize", new ProcessorParameter(
            "BgaBlurSize",
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaBlurSize"),
            typeof(int), 5, 1, 31,
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaBlurSize_Desc")));
        Parameters.Add("BgaCircularity", new ProcessorParameter(
            "BgaCircularity",
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaCircularity"),
            typeof(double), 0.5, 0.0, 1.0,
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaCircularity_Desc")));
        // ── Step 2: void detection ──
        Parameters.Add("MinThreshold", new ProcessorParameter(
            "MinThreshold",
            LocalizationHelper.GetString("BgaVoidRateProcessor_MinThreshold"),
            typeof(int), 128, 0, 255,
            LocalizationHelper.GetString("BgaVoidRateProcessor_MinThreshold_Desc")));
        Parameters.Add("MaxThreshold", new ProcessorParameter(
            "MaxThreshold",
            LocalizationHelper.GetString("BgaVoidRateProcessor_MaxThreshold"),
            typeof(int), 255, 0, 255,
            LocalizationHelper.GetString("BgaVoidRateProcessor_MaxThreshold_Desc")));
        Parameters.Add("MinVoidArea", new ProcessorParameter(
            "MinVoidArea",
            LocalizationHelper.GetString("BgaVoidRateProcessor_MinVoidArea"),
            typeof(int), 10, 1, 10000,
            LocalizationHelper.GetString("BgaVoidRateProcessor_MinVoidArea_Desc")));
        Parameters.Add("VoidLimit", new ProcessorParameter(
            "VoidLimit",
            LocalizationHelper.GetString("BgaVoidRateProcessor_VoidLimit"),
            typeof(double), 25.0, 0.0, 100.0,
            LocalizationHelper.GetString("BgaVoidRateProcessor_VoidLimit_Desc")));
        Parameters.Add("Thickness", new ProcessorParameter(
            "Thickness",
            LocalizationHelper.GetString("BgaVoidRateProcessor_Thickness"),
            typeof(int), 2, 1, 10,
            LocalizationHelper.GetString("BgaVoidRateProcessor_Thickness_Desc")));
    }

    /// <summary>
    /// Runs the two-step inspection. All numeric results (per-ball data, overall
    /// void rate, classification, result text) are published via OutputData;
    /// the input image itself is returned unchanged as a clone.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string roiMode = GetParameter<string>("RoiMode");
        int bgaMinArea = GetParameter<int>("BgaMinArea");
        int bgaMaxArea = GetParameter<int>("BgaMaxArea");
        int bgaBlurSize = GetParameter<int>("BgaBlurSize");
        double bgaCircularity = GetParameter<double>("BgaCircularity");
        int minThresh = GetParameter<int>("MinThreshold");
        int maxThresh = GetParameter<int>("MaxThreshold");
        int minVoidArea = GetParameter<int>("MinVoidArea");
        double voidLimit = GetParameter<double>("VoidLimit");
        int thickness = GetParameter<int>("Thickness");
        // Gaussian kernels must be odd.
        if (bgaBlurSize % 2 == 0) bgaBlurSize++;
        OutputData.Clear();
        int w = inputImage.Width, h = inputImage.Height;
        // Build the ROI mask restricting the detection area (needs >= 3 polygon points).
        Image<Gray, byte>? roiMask = null;
        if (roiMode == "Polygon")
        {
            int polyCount = GetParameter<int>("PolyCount");
            if (polyCount >= 3)
            {
                var pts = new Point[polyCount];
                for (int i = 0; i < polyCount; i++)
                    pts[i] = new Point(GetParameter<int>($"PolyX{i}"), GetParameter<int>($"PolyY{i}"));
                roiMask = new Image<Gray, byte>(w, h);
                using var vop = new VectorOfPoint(pts);
                using var vvop = new VectorOfVectorOfPoint(vop);
                CvInvoke.DrawContours(roiMask, vvop, 0, new MCvScalar(255), -1);
                _logger.Debug("ROI Polygon: {Count} points", polyCount);
            }
        }
        OutputData["RoiMode"] = roiMode;
        // BUG FIX: the mask is published to consumers via OutputData, so this method
        // must NOT dispose it before returning (the original called roiMask?.Dispose()
        // on both exit paths, handing consumers a disposed image). Ownership is
        // transferred to OutputData here.
        OutputData["RoiMask"] = roiMask;
        _logger.Debug("BgaVoidRate 两步法: BgaArea=[{Min},{Max}], Blur={Blur}, Circ={Circ}, Thresh=[{TMin},{TMax}]",
            bgaMinArea, bgaMaxArea, bgaBlurSize, bgaCircularity, minThresh, maxThresh);
        // ================================================================
        // Step 1: automatically locate the BGA solder balls
        // ================================================================
        var bgaResults = DetectBgaBalls(inputImage, bgaBlurSize, bgaMinArea, bgaMaxArea, bgaCircularity, roiMask);
        _logger.Information("第一步完成: 检测到 {Count} 个BGA焊球", bgaResults.Count);
        if (bgaResults.Count == 0)
        {
            // No balls found: publish an empty-but-complete result set so consumers
            // can render a consistent "nothing detected" state.
            OutputData["BgaVoidResult"] = true;
            OutputData["BgaCount"] = 0;
            OutputData["BgaBalls"] = bgaResults;
            OutputData["VoidRate"] = 0.0;
            OutputData["Classification"] = "N/A";
            OutputData["ResultText"] = "No BGA detected";
            OutputData["Thickness"] = thickness;
            OutputData["VoidLimit"] = voidLimit;
            OutputData["TotalBgaArea"] = 0;
            OutputData["TotalVoidArea"] = 0;
            OutputData["TotalVoidCount"] = 0;
            return inputImage.Clone();
        }
        // ================================================================
        // Step 2: detect voids inside every solder ball
        // ================================================================
        int totalBgaArea = 0;
        int totalVoidArea = 0;
        int totalVoidCount = 0;
        foreach (var bga in bgaResults)
        {
            DetectVoidsInBga(inputImage, bga, minThresh, maxThresh, minVoidArea);
            totalBgaArea += bga.BgaArea;
            totalVoidArea += bga.VoidPixels;
            totalVoidCount += bga.Voids.Count;
        }
        double overallVoidRate = totalBgaArea > 0 ? (double)totalVoidArea / totalBgaArea * 100.0 : 0;
        string classification = overallVoidRate <= voidLimit ? "PASS" : "FAIL";
        // Classify each ball individually against the same limit.
        foreach (var bga in bgaResults)
        {
            bga.Classification = bga.VoidRate <= voidLimit ? "PASS" : "FAIL";
        }
        _logger.Information("第二步完成: 总气泡率={VoidRate:F1}%, 气泡数={Count}, 判定={Class}",
            overallVoidRate, totalVoidCount, classification);
        // Publish results.
        OutputData["BgaVoidResult"] = true;
        OutputData["BgaCount"] = bgaResults.Count;
        OutputData["BgaBalls"] = bgaResults;
        OutputData["VoidRate"] = overallVoidRate;
        OutputData["FillRate"] = 100.0 - overallVoidRate;
        OutputData["TotalBgaArea"] = totalBgaArea;
        OutputData["TotalVoidArea"] = totalVoidArea;
        OutputData["TotalVoidCount"] = totalVoidCount;
        OutputData["VoidLimit"] = voidLimit;
        OutputData["Classification"] = classification;
        OutputData["Thickness"] = thickness;
        OutputData["ResultText"] = $"Void: {overallVoidRate:F1}% | {classification} | BGA×{bgaResults.Count}";
        return inputImage.Clone();
    }

    /// <summary>
    /// Step 1: locate BGA solder balls.
    /// Otsu inverse binarization (balls are the dark regions in positive X-ray
    /// images), optional ROI masking, morphological closing, then contour
    /// filtering by area and circularity (4π·area/perimeter²) plus ellipse fitting.
    /// Results are sorted by area, largest first, and renumbered 1..N.
    /// </summary>
    private List<BgaBallInfo> DetectBgaBalls(Image<Gray, byte> input, int blurSize, int minArea, int maxArea, double minCircularity, Image<Gray, byte>? roiMask)
    {
        var results = new List<BgaBallInfo>();
        int w = input.Width, h = input.Height;
        // Gaussian blur for noise suppression.
        using var blurred = new Image<Gray, byte>(w, h);
        CvInvoke.GaussianBlur(input, blurred, new Size(blurSize, blurSize), 0);
        // Otsu automatic inverse binarization (positive X-ray: balls = dark regions).
        using var binary = new Image<Gray, byte>(w, h);
        CvInvoke.Threshold(blurred, binary, 0, 255, ThresholdType.Otsu | ThresholdType.BinaryInv);
        // Keep only the ROI region when a mask is supplied.
        if (roiMask != null)
        {
            CvInvoke.BitwiseAnd(binary, roiMask, binary);
        }
        // Morphological closing fills small holes inside the ball blobs.
        using var kernel = CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(5, 5), new Point(-1, -1));
        CvInvoke.MorphologyEx(binary, binary, MorphOp.Close, kernel, new Point(-1, -1), 2, BorderType.Default, new MCvScalar(0));
        // Contour detection.
        using var contours = new VectorOfVectorOfPoint();
        using var hierarchy = new Mat();
        CvInvoke.FindContours(binary, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
        int bgaIndex = 0;
        for (int i = 0; i < contours.Size; i++)
        {
            double area = CvInvoke.ContourArea(contours[i]);
            if (area < minArea || area > maxArea) continue;
            // Circularity filter: circularity = 4π × area / perimeter².
            double perimeter = CvInvoke.ArcLength(contours[i], true);
            if (perimeter < 1) continue;
            double circularity = 4.0 * Math.PI * area / (perimeter * perimeter);
            if (circularity < minCircularity) continue;
            // FitEllipse needs at least 5 contour points.
            if (contours[i].Size < 5) continue;
            var ellipse = CvInvoke.FitEllipse(contours[i]);
            var moments = CvInvoke.Moments(contours[i]);
            if (moments.M00 < 1) continue; // degenerate contour: no valid centroid
            bgaIndex++;
            results.Add(new BgaBallInfo
            {
                Index = bgaIndex,
                CenterX = moments.M10 / moments.M00,
                CenterY = moments.M01 / moments.M00,
                FittedEllipse = ellipse,
                ContourPoints = contours[i].ToArray(),
                BgaArea = (int)area,
                Circularity = circularity
            });
        }
        // Sort by area, largest first, and renumber 1..N.
        results.Sort((a, b) => b.BgaArea.CompareTo(a.BgaArea));
        for (int i = 0; i < results.Count; i++) results[i].Index = i + 1;
        return results;
    }

    /// <summary>
    /// Step 2: detect voids inside a single solder ball.
    /// The ball contour becomes a filled mask; pixels whose gray level lies in
    /// [minThresh, maxThresh] (voids appear bright in positive images) are
    /// segmented, then each void contour above the minimum area is measured and
    /// recorded on <paramref name="bga"/> (sorted by area, largest first).
    /// </summary>
    private void DetectVoidsInBga(Image<Gray, byte> input, BgaBallInfo bga, int minThresh, int maxThresh, int minVoidArea)
    {
        int w = input.Width, h = input.Height;
        // Filled mask of this ball's contour.
        using var mask = new Image<Gray, byte>(w, h);
        using (var vop = new VectorOfPoint(bga.ContourPoints))
        using (var vvop = new VectorOfVectorOfPoint(vop))
        {
            CvInvoke.DrawContours(mask, vvop, 0, new MCvScalar(255), -1);
        }
        int bgaPixels = CvInvoke.CountNonZero(mask);
        bga.BgaArea = bgaPixels; // refine the area to the filled-mask pixel count
        // Dual-threshold segmentation (positive mode: voids = bright; gray level
        // within [minThresh, maxThresh] counts as void).
        using var voidImg = new Image<Gray, byte>(w, h);
        byte[,,] srcData = input.Data;
        byte[,,] dstData = voidImg.Data;
        byte[,,] maskData = mask.Data;
        for (int y = 0; y < h; y++)
        {
            for (int x = 0; x < w; x++)
            {
                if (maskData[y, x, 0] > 0)
                {
                    byte val = srcData[y, x, 0];
                    dstData[y, x, 0] = (val >= minThresh && val <= maxThresh) ? (byte)255 : (byte)0;
                }
            }
        }
        int voidPixels = CvInvoke.CountNonZero(voidImg);
        bga.VoidPixels = voidPixels;
        bga.VoidRate = bgaPixels > 0 ? (double)voidPixels / bgaPixels * 100.0 : 0;
        // Contour of each individual void.
        using var contours = new VectorOfVectorOfPoint();
        using var hierarchy = new Mat();
        CvInvoke.FindContours(voidImg, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
        for (int i = 0; i < contours.Size; i++)
        {
            double area = CvInvoke.ContourArea(contours[i]);
            if (area < minVoidArea) continue;
            var moments = CvInvoke.Moments(contours[i]);
            if (moments.M00 < 1) continue;
            bga.Voids.Add(new VoidInfo
            {
                Index = bga.Voids.Count + 1,
                CenterX = moments.M10 / moments.M00,
                CenterY = moments.M01 / moments.M00,
                Area = area,
                AreaPercent = bgaPixels > 0 ? area / bgaPixels * 100.0 : 0,
                BoundingBox = CvInvoke.BoundingRectangle(contours[i]),
                ContourPoints = contours[i].ToArray()
            });
        }
        // Sort by area, largest first, and renumber 1..N.
        bga.Voids.Sort((a, b) => b.Area.CompareTo(a.Area));
        for (int i = 0; i < bga.Voids.Count; i++) bga.Voids[i].Index = i + 1;
    }
}
/// <summary>
/// Information about one detected BGA solder ball.
/// </summary>
public class BgaBallInfo
{
    /// <summary>1-based index, assigned after sorting balls by area (largest first).</summary>
    public int Index { get; set; }
    /// <summary>Centroid X from contour moments (M10 / M00).</summary>
    public double CenterX { get; set; }
    /// <summary>Centroid Y from contour moments (M01 / M00).</summary>
    public double CenterY { get; set; }
    /// <summary>Ellipse fitted to the ball's outer contour.</summary>
    public RotatedRect FittedEllipse { get; set; }
    /// <summary>Outer contour of the ball.</summary>
    public Point[] ContourPoints { get; set; } = Array.Empty<Point>();
    /// <summary>Ball area in pixels (initially the contour area; later refined to the filled-mask pixel count).</summary>
    public int BgaArea { get; set; }
    /// <summary>Shape circularity: 4π·area / perimeter².</summary>
    public double Circularity { get; set; }
    /// <summary>Total void pixels found inside this ball.</summary>
    public int VoidPixels { get; set; }
    /// <summary>Void rate in percent: VoidPixels / BgaArea × 100.</summary>
    public double VoidRate { get; set; }
    /// <summary>"PASS"/"FAIL" against the void limit, or "N/A" before evaluation.</summary>
    public string Classification { get; set; } = "N/A";
    /// <summary>Individual voids detected inside this ball.</summary>
    public List<VoidInfo> Voids { get; set; } = new();
}
/// <summary>
/// Information about one void (bubble) inside a solder ball.
/// </summary>
public class VoidInfo
{
    /// <summary>1-based index, assigned after sorting voids by area (largest first).</summary>
    public int Index { get; set; }
    /// <summary>Centroid X from contour moments (M10 / M00).</summary>
    public double CenterX { get; set; }
    /// <summary>Centroid Y from contour moments (M01 / M00).</summary>
    public double CenterY { get; set; }
    /// <summary>Void contour area in pixels.</summary>
    public double Area { get; set; }
    /// <summary>Void area as a percentage of the ball's pixel area.</summary>
    public double AreaPercent { get; set; }
    /// <summary>Axis-aligned bounding rectangle of the void.</summary>
    public Rectangle BoundingBox { get; set; }
    /// <summary>Void contour points.</summary>
    public Point[] ContourPoints { get; set; } = Array.Empty<Point>();
}
@@ -0,0 +1,254 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: ContourProcessor.cs
// 描述: 轮廓查找算子,用于检测和分析图像中的轮廓
// 功能:
// - 检测图像中的外部轮廓
// - 根据面积范围过滤轮廓
// - 计算轮廓的几何特征(面积、周长、中心、外接矩形等)
// - 输出轮廓信息供后续处理使用
// 算法: 基于OpenCV的轮廓检测算法
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Contour-finding operator: detects external contours of the target-colored
/// regions and reports their geometric features (area, perimeter, centroid,
/// bounding box, minimum enclosing circle) for downstream processing.
/// </summary>
public class ContourProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ContourProcessor>();

    public ContourProcessor()
    {
        Name = LocalizationHelper.GetString("ContourProcessor_Name");
        Description = LocalizationHelper.GetString("ContourProcessor_Description");
    }

    /// <summary>
    /// Registers target color (White/Black), optional fixed/Otsu thresholding,
    /// the contour area filter range, and the drawing thickness.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("TargetColor", new ProcessorParameter(
            "TargetColor",
            LocalizationHelper.GetString("ContourProcessor_TargetColor"),
            typeof(string),
            "White",
            null,
            null,
            LocalizationHelper.GetString("ContourProcessor_TargetColor_Desc"),
            new string[] { "White", "Black" }));
        Parameters.Add("UseThreshold", new ProcessorParameter(
            "UseThreshold",
            LocalizationHelper.GetString("ContourProcessor_UseThreshold"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("ContourProcessor_UseThreshold_Desc")));
        Parameters.Add("ThresholdValue", new ProcessorParameter(
            "ThresholdValue",
            LocalizationHelper.GetString("ContourProcessor_ThresholdValue"),
            typeof(int),
            120,
            0,
            255,
            LocalizationHelper.GetString("ContourProcessor_ThresholdValue_Desc")));
        Parameters.Add("UseOtsu", new ProcessorParameter(
            "UseOtsu",
            LocalizationHelper.GetString("ContourProcessor_UseOtsu"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("ContourProcessor_UseOtsu_Desc")));
        Parameters.Add("MinArea", new ProcessorParameter(
            "MinArea",
            LocalizationHelper.GetString("ContourProcessor_MinArea"),
            typeof(double),
            10.0,
            0.0,
            10000.0,
            LocalizationHelper.GetString("ContourProcessor_MinArea_Desc")));
        Parameters.Add("MaxArea", new ProcessorParameter(
            "MaxArea",
            LocalizationHelper.GetString("ContourProcessor_MaxArea"),
            typeof(double),
            100000.0,
            0.0,
            1000000.0,
            LocalizationHelper.GetString("ContourProcessor_MaxArea_Desc")));
        Parameters.Add("Thickness", new ProcessorParameter(
            "Thickness",
            LocalizationHelper.GetString("ContourProcessor_Thickness"),
            typeof(int),
            2,
            1,
            10,
            LocalizationHelper.GetString("ContourProcessor_Thickness_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Finds external contours: optional thresholding first, inversion for black
    /// targets (FindContours expects white objects on a black background), then
    /// contour extraction and [MinArea, MaxArea] filtering. Results are published
    /// via OutputData ("ContourCount", "Contours", "Thickness"); the input image
    /// is returned unchanged as a clone.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string targetColor = GetParameter<string>("TargetColor");
        bool useThreshold = GetParameter<bool>("UseThreshold");
        int thresholdValue = GetParameter<int>("ThresholdValue");
        bool useOtsu = GetParameter<bool>("UseOtsu");
        double minArea = GetParameter<double>("MinArea");
        double maxArea = GetParameter<double>("MaxArea");
        int thickness = GetParameter<int>("Thickness");
        _logger.Debug("Process started: TargetColor = '{TargetColor}', UseThreshold = {UseThreshold}, ThresholdValue = {ThresholdValue}, UseOtsu = {UseOtsu}",
            targetColor, useThreshold, thresholdValue, useOtsu);
        OutputData.Clear();
        // Work on a copy so the caller's image is never modified.
        Image<Gray, byte> processImage = inputImage.Clone();
        // Step 1: optional binarization.
        if (useThreshold)
        {
            _logger.Debug("Applying threshold processing");
            Image<Gray, byte> thresholdImage = new Image<Gray, byte>(processImage.Size);
            if (useOtsu)
            {
                // Automatic threshold selection; the fixed value is ignored.
                CvInvoke.Threshold(processImage, thresholdImage, 0, 255, ThresholdType.Otsu);
                _logger.Debug("Applied Otsu threshold");
            }
            else
            {
                CvInvoke.Threshold(processImage, thresholdImage, thresholdValue, 255, ThresholdType.Binary);
                _logger.Debug("Applied binary threshold with value {ThresholdValue}", thresholdValue);
            }
            // Persist the thresholded image for offline debugging (best effort).
            try
            {
                string debugPath = Path.Combine("logs", $"contour_threshold_{DateTime.Now:yyyyMMdd_HHmmss}.png");
                Directory.CreateDirectory("logs");
                thresholdImage.Save(debugPath);
                _logger.Information("Saved threshold image to: {DebugPath}", debugPath);
            }
            catch (Exception ex)
            {
                _logger.Warning(ex, "Failed to save threshold image for debugging");
            }
            processImage.Dispose();
            processImage = thresholdImage;
        }
        // Step 2: invert when the target regions are black.
        bool isBlackTarget = targetColor != null &&
            (targetColor.Equals("Black", StringComparison.OrdinalIgnoreCase) ||
             targetColor.Equals("黑色", StringComparison.OrdinalIgnoreCase));
        if (isBlackTarget)
        {
            _logger.Debug("Inverting image for black region detection");
            CvInvoke.BitwiseNot(processImage, processImage);
            // Persist the inverted image for offline debugging (best effort).
            try
            {
                string debugPath = Path.Combine("logs", $"contour_inverted_{DateTime.Now:yyyyMMdd_HHmmss}.png");
                Directory.CreateDirectory("logs");
                processImage.Save(debugPath);
                _logger.Information("Saved inverted image to: {DebugPath}", debugPath);
            }
            catch (Exception ex)
            {
                _logger.Warning(ex, "Failed to save inverted image for debugging");
            }
        }
        // Step 3: find and filter contours.
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        using (Mat hierarchy = new Mat()) // FIX: was manually disposed; 'using' is exception-safe
        {
            CvInvoke.FindContours(processImage, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            _logger.Debug("Found {TotalContours} total contours before filtering", contours.Size);
            List<ContourInfo> contourInfos = new();
            for (int i = 0; i < contours.Size; i++)
            {
                double area = CvInvoke.ContourArea(contours[i]);
                if (area >= minArea && area <= maxArea)
                {
                    var moments = CvInvoke.Moments(contours[i]);
                    // FIX: guard against degenerate contours (zero spatial moment) which
                    // would otherwise produce NaN centroids — matches the M00 check the
                    // sibling processors use.
                    if (moments.M00 < 1)
                    {
                        _logger.Debug("Contour {Index} skipped: degenerate (M00 = {M00})", i, moments.M00);
                        continue;
                    }
                    double centerX = moments.M10 / moments.M00;
                    double centerY = moments.M01 / moments.M00;
                    var boundingRect = CvInvoke.BoundingRectangle(contours[i]);
                    double perimeter = CvInvoke.ArcLength(contours[i], true);
                    var circle = CvInvoke.MinEnclosingCircle(contours[i]);
                    contourInfos.Add(new ContourInfo
                    {
                        Index = i,
                        Area = area,
                        Perimeter = perimeter,
                        CenterX = centerX,
                        CenterY = centerY,
                        BoundingBox = boundingRect,
                        Points = contours[i].ToArray(),
                        CircleCenter = circle.Center,
                        CircleRadius = circle.Radius
                    });
                    _logger.Debug("Contour {Index}: Area = {Area}, Center = ({CenterX:F2}, {CenterY:F2})",
                        i, area, centerX, centerY);
                }
                else
                {
                    _logger.Debug("Contour {Index} filtered out: Area = {Area} (not in range {MinArea} - {MaxArea})",
                        i, area, minArea, maxArea);
                }
            }
            OutputData["ContourCount"] = contourInfos.Count;
            OutputData["Contours"] = contourInfos;
            OutputData["Thickness"] = thickness;
            processImage.Dispose();
            _logger.Information("Process completed: TargetColor = '{TargetColor}', Found {ContourCount} contours (filtered from {TotalContours})",
                targetColor, contourInfos.Count, contours.Size);
            return inputImage.Clone();
        }
    }
}
/// <summary>
/// Geometric features of one detected contour.
/// </summary>
public class ContourInfo
{
    /// <summary>Index of the contour in the original FindContours output.</summary>
    public int Index { get; set; }
    /// <summary>Contour area in pixels.</summary>
    public double Area { get; set; }
    /// <summary>Arc length of the closed contour.</summary>
    public double Perimeter { get; set; }
    /// <summary>Centroid X from contour moments (M10 / M00).</summary>
    public double CenterX { get; set; }
    /// <summary>Centroid Y from contour moments (M01 / M00).</summary>
    public double CenterY { get; set; }
    /// <summary>Axis-aligned bounding rectangle.</summary>
    public Rectangle BoundingBox { get; set; }
    /// <summary>Contour point set.</summary>
    public Point[] Points { get; set; } = Array.Empty<Point>();
    /// <summary>Center of the minimum enclosing circle.</summary>
    public PointF CircleCenter { get; set; }
    /// <summary>Radius of the minimum enclosing circle.</summary>
    public float CircleRadius { get; set; }
}
@@ -0,0 +1,303 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: EllipseDetectionProcessor.cs
// 描述: 椭圆检测算子,基于轮廓分析和椭圆拟合检测图像中的椭圆
// 功能:
// - 阈值分割 + 轮廓提取
// - 椭圆拟合(FitEllipse
// - 面积/轴长/离心率/拟合误差多维过滤
// - 支持双阈值分割和 Otsu 自动阈值
// 算法: 阈值分割 + OpenCV FitEllipse
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// Result of a single detected ellipse.
/// </summary>
public class EllipseInfo
{
    /// <summary>Sequential index (0-based).</summary>
    public int Index { get; set; }
    /// <summary>Center point X.</summary>
    public float CenterX { get; set; }
    /// <summary>Center point Y.</summary>
    public float CenterY { get; set; }
    /// <summary>Major axis length.</summary>
    public float MajorAxis { get; set; }
    /// <summary>Minor axis length.</summary>
    public float MinorAxis { get; set; }
    /// <summary>Rotation angle in degrees.</summary>
    public float Angle { get; set; }
    /// <summary>Contour area.</summary>
    public double Area { get; set; }
    /// <summary>Contour perimeter.</summary>
    public double Perimeter { get; set; }
    /// <summary>Eccentricity (0 = circle, near 1 = flat ellipse).</summary>
    public double Eccentricity { get; set; }
    /// <summary>Fit error in pixels.</summary>
    public double FitError { get; set; }
    /// <summary>Contour point set.</summary>
    public Point[] ContourPoints { get; set; } = Array.Empty<Point>();
    /// <summary>Bounding rectangle.</summary>
    public Rectangle BoundingBox { get; set; }
}
/// <summary>
/// Ellipse detector: threshold segmentation + contour extraction + FitEllipse,
/// with multi-criteria filtering (area, axis length, eccentricity, fit error).
/// </summary>
public class EllipseDetector
{
    private static readonly ILogger _logger = Log.ForContext<EllipseDetector>();

    // Segmentation: foreground = pixels within [MinThreshold, MaxThreshold],
    // or Otsu automatic thresholding when UseOtsu is set (fixed values ignored).
    public int MinThreshold { get; set; } = 64;
    public int MaxThreshold { get; set; } = 192;
    public bool UseOtsu { get; set; } = false;
    // Candidate filters applied per contour.
    public int MinContourPoints { get; set; } = 30;
    public double MinArea { get; set; } = 100;
    public double MaxArea { get; set; } = 1000000;
    public float MinMajorAxis { get; set; } = 10;
    public double MaxEccentricity { get; set; } = 0.95;
    public double MaxFitError { get; set; } = 5.0;
    // Outline thickness carried along for downstream rendering.
    public int Thickness { get; set; } = 2;

    /// <summary>
    /// Runs ellipse detection on a grayscale image.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <param name="roiMask">Optional mask; non-zero pixels mark the searchable region.</param>
    /// <returns>All ellipses passing every filter, indexed from 0.</returns>
    public List<EllipseInfo> Detect(Image<Gray, byte> inputImage, Image<Gray, byte>? roiMask = null)
    {
        _logger.Debug("Ellipse detection started: UseOtsu={UseOtsu}, MinThreshold={Min}, MaxThreshold={Max}",
            UseOtsu, MinThreshold, MaxThreshold);
        var results = new List<EllipseInfo>();
        using var binary = new Image<Gray, byte>(inputImage.Size);
        if (UseOtsu)
        {
            CvInvoke.Threshold(inputImage, binary, MinThreshold, 255, ThresholdType.Otsu);
            _logger.Debug("Using Otsu auto threshold");
        }
        else
        {
            // Dual-threshold segmentation: pixels within [MinThreshold, MaxThreshold]
            // become foreground (255), everything else background (0).
            byte[,,] inputData = inputImage.Data;
            byte[,,] outputData = binary.Data;
            int height = inputImage.Height;
            int width = inputImage.Width;
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixelValue = inputData[y, x, 0];
                    outputData[y, x, 0] = (pixelValue >= MinThreshold && pixelValue <= MaxThreshold)
                        ? (byte)255
                        : (byte)0;
                }
            }
            _logger.Debug("Dual threshold segmentation: MinThreshold={Min}, MaxThreshold={Max}", MinThreshold, MaxThreshold);
        }
        // Restrict to the ROI when a mask is supplied.
        if (roiMask != null)
        {
            CvInvoke.BitwiseAnd(binary, roiMask, binary);
        }
        using var contours = new VectorOfVectorOfPoint();
        using var hierarchy = new Mat();
        CvInvoke.FindContours(binary, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);
        _logger.Debug("Found {Count} contours", contours.Size);
        int index = 0;
        for (int i = 0; i < contours.Size; i++)
        {
            var contour = contours[i];
            // FitEllipse needs >= 5 points; the configurable minimum adds noise rejection.
            if (contour.Size < Math.Max(5, MinContourPoints)) continue;
            double area = CvInvoke.ContourArea(contour);
            if (area < MinArea || area > MaxArea) continue;
            RotatedRect ellipseRect = CvInvoke.FitEllipse(contour);
            float majorAxis = Math.Max(ellipseRect.Size.Width, ellipseRect.Size.Height);
            float minorAxis = Math.Min(ellipseRect.Size.Width, ellipseRect.Size.Height);
            if (majorAxis < MinMajorAxis) continue;
            // Eccentricity from the axis ratio: e = sqrt(1 - (b/a)^2).
            double eccentricity = 0;
            if (majorAxis > 0)
            {
                double ratio = minorAxis / majorAxis;
                eccentricity = Math.Sqrt(1.0 - ratio * ratio);
            }
            if (eccentricity > MaxEccentricity) continue;
            double fitError = ComputeFitError(contour.ToArray(), ellipseRect);
            if (fitError > MaxFitError) continue;
            results.Add(new EllipseInfo
            {
                Index = index++,
                CenterX = ellipseRect.Center.X,
                CenterY = ellipseRect.Center.Y,
                MajorAxis = majorAxis,
                MinorAxis = minorAxis,
                Angle = ellipseRect.Angle,
                Area = area,
                Perimeter = CvInvoke.ArcLength(contour, true),
                Eccentricity = eccentricity,
                FitError = fitError,
                ContourPoints = contour.ToArray(),
                BoundingBox = CvInvoke.BoundingRectangle(contour)
            });
        }
        _logger.Information("Ellipse detection completed: detected {Count} ellipses", results.Count);
        return results;
    }

    /// <summary>
    /// Mean deviation (roughly in pixels) of the contour points from the fitted
    /// ellipse, measured in the ellipse-aligned local frame.
    /// </summary>
    private static double ComputeFitError(Point[] contourPoints, RotatedRect ellipse)
    {
        double cx = ellipse.Center.X, cy = ellipse.Center.Y;
        double a = Math.Max(ellipse.Size.Width, ellipse.Size.Height) / 2.0;
        double b = Math.Min(ellipse.Size.Width, ellipse.Size.Height) / 2.0;
        double angleRad = ellipse.Angle * Math.PI / 180.0;
        double cosA = Math.Cos(angleRad), sinA = Math.Sin(angleRad);
        // FIX: also reject a degenerate minor axis (the original only checked 'a')
        // and an empty point set, both of which would divide by zero below.
        if (a < 1e-6 || b < 1e-6 || contourPoints.Length == 0) return double.MaxValue;
        double totalError = 0;
        foreach (var pt in contourPoints)
        {
            double dx = pt.X - cx, dy = pt.Y - cy;
            // Rotate the point into the ellipse-aligned frame.
            double localX = dx * cosA + dy * sinA;
            double localY = -dx * sinA + dy * cosA;
            // |sqrt(E) - 1| is the relative radial deviation from the ellipse
            // (E = 1 on the boundary); scaling by sqrt(a*b) approximates pixels.
            double ellipseVal = (localX * localX) / (a * a) + (localY * localY) / (b * b);
            totalError += Math.Abs(Math.Sqrt(ellipseVal) - 1.0) * Math.Sqrt(a * b);
        }
        return totalError / contourPoints.Length;
    }
}
/// <summary>
/// 椭圆检测算子
/// </summary>
public class EllipseDetectionProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<EllipseDetectionProcessor>();
/// <summary>Creates the processor and assigns its localized name and description.</summary>
public EllipseDetectionProcessor()
{
    Name = LocalizationHelper.GetString("EllipseDetectionProcessor_Name");
    Description = LocalizationHelper.GetString("EllipseDetectionProcessor_Description");
}
protected override void InitializeParameters()
{
// ── 多边形ROI(由UI注入,最多32个点) ──
Parameters.Add("PolyCount", new ProcessorParameter("PolyCount", "PolyCount", typeof(int), 0, null, null, "") { IsVisible = false });
for (int i = 0; i < 32; i++)
{
Parameters.Add($"PolyX{i}", new ProcessorParameter($"PolyX{i}", $"PolyX{i}", typeof(int), 0, null, null, "") { IsVisible = false });
Parameters.Add($"PolyY{i}", new ProcessorParameter($"PolyY{i}", $"PolyY{i}", typeof(int), 0, null, null, "") { IsVisible = false });
}
Parameters.Add("MinThreshold", new ProcessorParameter(
"MinThreshold", LocalizationHelper.GetString("EllipseDetectionProcessor_MinThreshold"),
typeof(int), 64, 0, 255,
LocalizationHelper.GetString("EllipseDetectionProcessor_MinThreshold_Desc")));
Parameters.Add("MaxThreshold", new ProcessorParameter(
"MaxThreshold", LocalizationHelper.GetString("EllipseDetectionProcessor_MaxThreshold"),
typeof(int), 192, 0, 255,
LocalizationHelper.GetString("EllipseDetectionProcessor_MaxThreshold_Desc")));
Parameters.Add("UseOtsu", new ProcessorParameter(
"UseOtsu", LocalizationHelper.GetString("EllipseDetectionProcessor_UseOtsu"),
typeof(bool), false, null, null,
LocalizationHelper.GetString("EllipseDetectionProcessor_UseOtsu_Desc")));
Parameters.Add("MinContourPoints", new ProcessorParameter(
"MinContourPoints", LocalizationHelper.GetString("EllipseDetectionProcessor_MinContourPoints"),
typeof(int), 30, 5, 1000,
LocalizationHelper.GetString("EllipseDetectionProcessor_MinContourPoints_Desc")));
Parameters.Add("MinArea", new ProcessorParameter(
"MinArea", LocalizationHelper.GetString("EllipseDetectionProcessor_MinArea"),
typeof(double), 100.0, 0.0, 1000000.0,
LocalizationHelper.GetString("EllipseDetectionProcessor_MinArea_Desc")));
Parameters.Add("MaxArea", new ProcessorParameter(
"MaxArea", LocalizationHelper.GetString("EllipseDetectionProcessor_MaxArea"),
typeof(double), 1000000.0, 0.0, 10000000.0,
LocalizationHelper.GetString("EllipseDetectionProcessor_MaxArea_Desc")));
Parameters.Add("MaxEccentricity", new ProcessorParameter(
"MaxEccentricity", LocalizationHelper.GetString("EllipseDetectionProcessor_MaxEccentricity"),
typeof(double), 0.95, 0.0, 1.0,
LocalizationHelper.GetString("EllipseDetectionProcessor_MaxEccentricity_Desc")));
Parameters.Add("MaxFitError", new ProcessorParameter(
"MaxFitError", LocalizationHelper.GetString("EllipseDetectionProcessor_MaxFitError"),
typeof(double), 5.0, 0.0, 50.0,
LocalizationHelper.GetString("EllipseDetectionProcessor_MaxFitError_Desc")));
Parameters.Add("Thickness", new ProcessorParameter(
"Thickness", LocalizationHelper.GetString("EllipseDetectionProcessor_Thickness"),
typeof(int), 2, 1, 10,
LocalizationHelper.GetString("EllipseDetectionProcessor_Thickness_Desc")));
_logger.Debug("InitializeParameters");
}
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
int thickness = GetParameter<int>("Thickness");
_logger.Debug("Ellipse detection started");
OutputData.Clear();
// 构建多边形ROI掩码
int polyCount = GetParameter<int>("PolyCount");
Image<Gray, byte>? roiMask = null;
if (polyCount >= 3)
{
var pts = new Point[polyCount];
for (int i = 0; i < polyCount; i++)
pts[i] = new Point(GetParameter<int>($"PolyX{i}"), GetParameter<int>($"PolyY{i}"));
roiMask = new Image<Gray, byte>(inputImage.Width, inputImage.Height);
using var vop = new VectorOfPoint(pts);
using var vvop = new VectorOfVectorOfPoint(vop);
CvInvoke.DrawContours(roiMask, vvop, 0, new MCvScalar(255), -1);
}
var detector = new EllipseDetector
{
MinThreshold = GetParameter<int>("MinThreshold"),
MaxThreshold = GetParameter<int>("MaxThreshold"),
UseOtsu = GetParameter<bool>("UseOtsu"),
MinContourPoints = GetParameter<int>("MinContourPoints"),
MinArea = GetParameter<double>("MinArea"),
MaxArea = GetParameter<double>("MaxArea"),
MaxEccentricity = GetParameter<double>("MaxEccentricity"),
MaxFitError = GetParameter<double>("MaxFitError"),
Thickness = thickness
};
var ellipses = detector.Detect(inputImage, roiMask);
OutputData["Ellipses"] = ellipses;
OutputData["EllipseCount"] = ellipses.Count;
OutputData["Thickness"] = thickness;
roiMask?.Dispose();
_logger.Information("Ellipse detection completed: detected {Count} ellipses", ellipses.Count);
return inputImage.Clone();
}
}
@@ -0,0 +1,133 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: FillRateProcessor.cs
// 描述: 通孔填锡率测量算子(倾斜投影几何法),基于四椭圆ROI
// 功能:
// - 样品倾斜约45°放置,利用投影位移关系计算填锡率
// - 四个椭圆定义:
// E1 = 通孔底部轮廓
// E2 = 通孔顶部轮廓
// E3 = 填锡起点(与E1重合,代表0%填锡)
// E4 = 填锡终点(锡实际填充到的高度)
// - 填锡率 = |E4中心 - E3中心| / |E2中心 - E1中心| × 100%
// - 纯几何方法,不依赖灰度分析
// - IPC-610 THT 分级判定(Class 1/2/3)
// 算法: 倾斜投影位移比例
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// 通孔填锡率测量算子(倾斜投影几何法)
/// </summary>
public class FillRateProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<FillRateProcessor>();

    /// <summary>Creates the processor with its localized name and description.</summary>
    public FillRateProcessor()
    {
        Name = LocalizationHelper.GetString("FillRateProcessor_Name");
        Description = LocalizationHelper.GetString("FillRateProcessor_Description");
    }

    /// <summary>
    /// Registers the four hidden ellipses (injected by the interactive control)
    /// plus the pass/fail limit and the drawing thickness.
    /// </summary>
    protected override void InitializeParameters()
    {
        AddEllipseParams("E1", 200, 250, 60, 50, 0); // hole bottom
        AddEllipseParams("E2", 220, 180, 60, 50, 0); // hole top
        AddEllipseParams("E3", 200, 250, 60, 50, 0); // fill start (= E1)
        AddEllipseParams("E4", 210, 220, 55, 45, 0); // fill end
        Parameters.Add("THTLimit", new ProcessorParameter(
            "THTLimit",
            LocalizationHelper.GetString("FillRateProcessor_THTLimit"),
            typeof(double), 75.0, 0.0, 100.0,
            LocalizationHelper.GetString("FillRateProcessor_THTLimit_Desc")));
        Parameters.Add("Thickness", new ProcessorParameter(
            "Thickness",
            LocalizationHelper.GetString("FillRateProcessor_Thickness"),
            typeof(int), 2, 1, 10,
            LocalizationHelper.GetString("FillRateProcessor_Thickness_Desc")));
    }

    /// <summary>Registers the five hidden geometry parameters of one ellipse.</summary>
    private void AddEllipseParams(string prefix, int cx, int cy, double a, double b, double angle)
    {
        // Registers one invisible parameter keyed "<prefix>_<suffix>".
        void Hidden(string suffix, Type type, object value)
        {
            string key = $"{prefix}_{suffix}";
            Parameters.Add(key, new ProcessorParameter(key, key, type, value, null, null, "") { IsVisible = false });
        }
        Hidden("CX", typeof(int), cx);
        Hidden("CY", typeof(int), cy);
        Hidden("A", typeof(double), a);
        Hidden("B", typeof(double), b);
        Hidden("Angle", typeof(double), angle);
    }

    /// <summary>
    /// Computes the through-hole solder fill rate from the projected displacement
    /// of the four ellipse centers (tilted-sample geometry):
    /// rate = |E4-E3| / |E2-E1| * 100. Publishes rate, distances, PASS/FAIL and
    /// the ellipse geometry for drawing, then returns a clone of the input.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double thtLimit = GetParameter<double>("THTLimit");
        int thickness = GetParameter<int>("Thickness");

        // Read one ellipse's injected geometry as (center, axes, rotation).
        (Point Center, Size Axes, double Angle) ReadEllipse(string p) => (
            new Point(GetParameter<int>($"{p}_CX"), GetParameter<int>($"{p}_CY")),
            new Size((int)GetParameter<double>($"{p}_A"), (int)GetParameter<double>($"{p}_B")),
            GetParameter<double>($"{p}_Angle"));

        var e1 = ReadEllipse("E1"); // hole bottom
        var e2 = ReadEllipse("E2"); // hole top
        var e3 = ReadEllipse("E3"); // fill start
        var e4 = ReadEllipse("E4"); // fill end

        _logger.Debug("FillRate: E1=({E1X},{E1Y}), E2=({E2X},{E2Y}), E3=({E3X},{E3Y}), E4=({E4X},{E4Y})",
            e1.Center.X, e1.Center.Y, e2.Center.X, e2.Center.Y, e3.Center.X, e3.Center.Y, e4.Center.X, e4.Center.Y);
        OutputData.Clear();

        // Euclidean distance between two ellipse centers.
        static double Dist(Point from, Point to)
        {
            double dx = to.X - from.X, dy = to.Y - from.Y;
            return Math.Sqrt(dx * dx + dy * dy);
        }

        double fullDistance = Dist(e1.Center, e2.Center); // full hole height projection
        double fillDistance = Dist(e3.Center, e4.Center); // solder fill projection

        // Fill rate = fill displacement / full-height displacement.
        double fillRate = fullDistance > 0 ? (fillDistance / fullDistance) * 100.0 : 0;
        fillRate = Math.Clamp(fillRate, 0, 100);
        string classification = fillRate >= thtLimit ? "PASS" : "FAIL";

        OutputData["FillRateResult"] = true;
        OutputData["FillRate"] = fillRate;
        OutputData["VoidRate"] = 100.0 - fillRate;
        OutputData["FullDistance"] = fullDistance;
        OutputData["FillDistance"] = fillDistance;
        OutputData["THTLimit"] = thtLimit;
        OutputData["Classification"] = classification;
        OutputData["Thickness"] = thickness;
        // Ellipse geometry (for drawing) as (center, axes, angle) tuples.
        OutputData["E1"] = e1;
        OutputData["E2"] = e2;
        OutputData["E3"] = e3;
        OutputData["E4"] = e4;
        string resultText = $"{fillRate:F1}% | {classification}";
        OutputData["ResultText"] = resultText;
        _logger.Information("FillRate (geometric): {Rate}%, {Class}, FullDist={FD:F1}, FillDist={FiD:F1}",
            fillRate, classification, fullDistance, fillDistance);
        return inputImage.Clone();
    }
}
@@ -0,0 +1,150 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: LineMeasurementProcessor.cs
// 描述: 直线测量算子,用于测量图像中两点之间的距离
// 功能:
// - 用户指定两个点坐标(像素坐标)
// - 计算两点之间的欧氏距离(像素单位)
// - 支持像素尺寸标定,输出实际物理距离
// - 在图像上绘制测量线和标注
// - 输出测量结果供后续处理使用
// 算法: 欧氏距离计算
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// 直线测量算子 - 测量两点之间的距离
/// </summary>
public class LineMeasurementProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<LineMeasurementProcessor>();

    /// <summary>Creates the processor with its localized name and description.</summary>
    public LineMeasurementProcessor()
    {
        Name = LocalizationHelper.GetString("LineMeasurementProcessor_Name");
        Description = LocalizationHelper.GetString("LineMeasurementProcessor_Description");
    }

    /// <summary>
    /// Registers the two (hidden) endpoint coordinates plus calibration, unit,
    /// thickness and label-visibility settings.
    /// </summary>
    protected override void InitializeParameters()
    {
        // Endpoints are injected by the interactive overlay, so they are hidden.
        void AddHiddenCoordinate(string key, int defaultValue)
        {
            Parameters.Add(key, new ProcessorParameter(
                key,
                LocalizationHelper.GetString($"LineMeasurementProcessor_{key}"),
                typeof(int), defaultValue, null, null,
                LocalizationHelper.GetString($"LineMeasurementProcessor_{key}_Desc"))
            { IsVisible = false });
        }
        AddHiddenCoordinate("X1", 100);
        AddHiddenCoordinate("Y1", 100);
        AddHiddenCoordinate("X2", 400);
        AddHiddenCoordinate("Y2", 400);
        Parameters.Add("PixelSize", new ProcessorParameter(
            "PixelSize",
            LocalizationHelper.GetString("LineMeasurementProcessor_PixelSize"),
            typeof(double), 1.0, null, null,
            LocalizationHelper.GetString("LineMeasurementProcessor_PixelSize_Desc")));
        Parameters.Add("Unit", new ProcessorParameter(
            "Unit",
            LocalizationHelper.GetString("LineMeasurementProcessor_Unit"),
            typeof(string), "px", null, null,
            LocalizationHelper.GetString("LineMeasurementProcessor_Unit_Desc"),
            new string[] { "px", "mm", "μm", "cm" }));
        Parameters.Add("Thickness", new ProcessorParameter(
            "Thickness",
            LocalizationHelper.GetString("LineMeasurementProcessor_Thickness"),
            typeof(int), 2, 1, 10,
            LocalizationHelper.GetString("LineMeasurementProcessor_Thickness_Desc")));
        Parameters.Add("ShowLabel", new ProcessorParameter(
            "ShowLabel",
            LocalizationHelper.GetString("LineMeasurementProcessor_ShowLabel"),
            typeof(bool), true, null, null,
            LocalizationHelper.GetString("LineMeasurementProcessor_ShowLabel_Desc")));
    }

    /// <summary>
    /// Measures the Euclidean distance between the two configured points
    /// (clamped into the image), converts it with the pixel-size calibration,
    /// and publishes distance, angle and label text via OutputData.
    /// Returns an unmodified clone of the input image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int rawX1 = GetParameter<int>("X1");
        int rawY1 = GetParameter<int>("Y1");
        int rawX2 = GetParameter<int>("X2");
        int rawY2 = GetParameter<int>("Y2");
        double pixelSize = GetParameter<double>("PixelSize");
        string unit = GetParameter<string>("Unit");
        int thickness = GetParameter<int>("Thickness");
        bool showLabel = GetParameter<bool>("ShowLabel");
        _logger.Debug("LineMeasurement: ({X1},{Y1}) -> ({X2},{Y2}), PixelSize={PixelSize}, Unit={Unit}",
            rawX1, rawY1, rawX2, rawY2, pixelSize, unit);
        OutputData.Clear();
        // Clamp both endpoints into the image bounds.
        var p1 = new Point(
            Math.Clamp(rawX1, 0, inputImage.Width - 1),
            Math.Clamp(rawY1, 0, inputImage.Height - 1));
        var p2 = new Point(
            Math.Clamp(rawX2, 0, inputImage.Width - 1),
            Math.Clamp(rawY2, 0, inputImage.Height - 1));
        // Pixel distance, calibrated distance, and angle vs. the horizontal.
        double dx = p2.X - p1.X;
        double dy = p2.Y - p1.Y;
        double pixelDistance = Math.Sqrt(dx * dx + dy * dy);
        double actualDistance = pixelDistance * pixelSize;
        double angleDeg = Math.Atan2(dy, dx) * 180.0 / Math.PI;
        OutputData["MeasurementType"] = "Line";
        OutputData["Point1"] = p1;
        OutputData["Point2"] = p2;
        OutputData["PixelDistance"] = pixelDistance;
        OutputData["ActualDistance"] = actualDistance;
        OutputData["Unit"] = unit;
        OutputData["Angle"] = angleDeg;
        OutputData["Thickness"] = thickness;
        OutputData["ShowLabel"] = showLabel;
        string distanceText = unit == "px"
            ? $"{pixelDistance:F2} px"
            : $"{actualDistance:F4} {unit} ({pixelDistance:F2} px)";
        OutputData["MeasurementText"] = distanceText;
        _logger.Information("LineMeasurement completed: Distance={Distance}, Angle={Angle:F2}°",
            distanceText, angleDeg);
        return inputImage.Clone();
    }
}
@@ -0,0 +1,116 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: PointToLineProcessor.cs
// 描述: 点到直线距离测量算子
// 功能:
// - 用户定义一条直线(两个端点)和一个测量点
// - 计算测量点到直线的垂直距离
// - 支持像素尺寸标定输出物理距离
// - 在图像上绘制直线、测量点、垂足和距离标注
// 算法: 点到直线距离公式
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
public class PointToLineProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<PointToLineProcessor>();

    /// <summary>Creates the processor with its localized name and description.</summary>
    public PointToLineProcessor()
    {
        Name = LocalizationHelper.GetString("PointToLineProcessor_Name");
        Description = LocalizationHelper.GetString("PointToLineProcessor_Description");
    }

    /// <summary>
    /// Registers the hidden line endpoints and measure point (injected by the
    /// interactive control) plus calibration, unit and thickness settings.
    /// </summary>
    protected override void InitializeParameters()
    {
        // Line endpoints + measure point (injected by the interactive control)
        Parameters.Add("L1X", new ProcessorParameter("L1X", "L1X", typeof(int), 100, null, null, "") { IsVisible = false });
        Parameters.Add("L1Y", new ProcessorParameter("L1Y", "L1Y", typeof(int), 200, null, null, "") { IsVisible = false });
        Parameters.Add("L2X", new ProcessorParameter("L2X", "L2X", typeof(int), 400, null, null, "") { IsVisible = false });
        Parameters.Add("L2Y", new ProcessorParameter("L2Y", "L2Y", typeof(int), 200, null, null, "") { IsVisible = false });
        Parameters.Add("PX", new ProcessorParameter("PX", "PX", typeof(int), 250, null, null, "") { IsVisible = false });
        Parameters.Add("PY", new ProcessorParameter("PY", "PY", typeof(int), 100, null, null, "") { IsVisible = false });
        Parameters.Add("PixelSize", new ProcessorParameter(
            "PixelSize",
            LocalizationHelper.GetString("PointToLineProcessor_PixelSize"),
            typeof(double), 1.0, null, null,
            LocalizationHelper.GetString("PointToLineProcessor_PixelSize_Desc")));
        Parameters.Add("Unit", new ProcessorParameter(
            "Unit",
            LocalizationHelper.GetString("PointToLineProcessor_Unit"),
            typeof(string), "px", null, null,
            LocalizationHelper.GetString("PointToLineProcessor_Unit_Desc"),
            new string[] { "px", "mm", "μm", "cm" }));
        Parameters.Add("Thickness", new ProcessorParameter(
            "Thickness",
            LocalizationHelper.GetString("PointToLineProcessor_Thickness"),
            typeof(int), 2, 1, 10,
            LocalizationHelper.GetString("PointToLineProcessor_Thickness_Desc")));
    }

    /// <summary>
    /// Computes the perpendicular distance from the measure point to the line
    /// through the two endpoints, plus the foot of the perpendicular, and
    /// publishes them via OutputData. For a degenerate (zero-length) line the
    /// distance is 0 and the foot equals the measure point.
    /// Returns an unmodified clone of the input image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int l1x = GetParameter<int>("L1X"), l1y = GetParameter<int>("L1Y");
        int l2x = GetParameter<int>("L2X"), l2y = GetParameter<int>("L2Y");
        int px = GetParameter<int>("PX"), py = GetParameter<int>("PY");
        double pixelSize = GetParameter<double>("PixelSize");
        string unit = GetParameter<string>("Unit");
        int thickness = GetParameter<int>("Thickness");
        OutputData.Clear();
        // Point-to-line distance: |AB × AP| / |AB|
        double abx = l2x - l1x, aby = l2y - l1y;
        double abLen = Math.Sqrt(abx * abx + aby * aby);
        double pixelDistance = 0;
        int footX = px, footY = py; // degenerate line → foot is the point itself
        if (abLen > 0.001)
        {
            // Cross-product magnitude divided by the segment length.
            double cross = Math.Abs(abx * (l1y - py) - aby * (l1x - px));
            pixelDistance = cross / abLen;
            // Foot of perpendicular: projection parameter t = AP·AB / |AB|²
            double apx = px - l1x, apy = py - l1y;
            double t = (apx * abx + apy * aby) / (abLen * abLen);
            // Bug fix: round to the nearest pixel instead of truncating toward
            // zero; truncation biased the drawn foot point by up to one pixel.
            footX = (int)Math.Round(l1x + t * abx);
            footY = (int)Math.Round(l1y + t * aby);
            OutputData["ProjectionT"] = t;
        }
        double actualDistance = pixelDistance * pixelSize;
        string distanceText = unit == "px"
            ? $"{pixelDistance:F2} px"
            : $"{actualDistance:F4} {unit} ({pixelDistance:F2} px)";
        OutputData["PointToLineResult"] = true;
        OutputData["Line1"] = new Point(l1x, l1y);
        OutputData["Line2"] = new Point(l2x, l2y);
        OutputData["MeasurePoint"] = new Point(px, py);
        OutputData["FootPoint"] = new Point(footX, footY);
        OutputData["PixelDistance"] = pixelDistance;
        OutputData["ActualDistance"] = actualDistance;
        OutputData["Unit"] = unit;
        OutputData["Thickness"] = thickness;
        OutputData["DistanceText"] = distanceText;
        _logger.Information("PointToLine: Distance={Dist}, Foot=({FX},{FY})", distanceText, footX, footY);
        return inputImage.Clone();
    }
}
@@ -0,0 +1,230 @@
// ============================================================================
// 文件名: VoidMeasurementProcessor.cs
// 描述: 空隙测量算子
//
// 处理流程:
// 1. 构建多边形ROI掩码,计算ROI面积
// 2. 在ROI内进行双阈值分割提取气泡区域
// 3. 形态学膨胀合并相邻气泡
// 4. 轮廓检测,计算每个气泡面积
// 5. 计算空隙率 = 总气泡面积 / ROI面积
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
public class VoidMeasurementProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<VoidMeasurementProcessor>();

    /// <summary>Creates the processor with its localized name and description.</summary>
    public VoidMeasurementProcessor()
    {
        Name = LocalizationHelper.GetString("VoidMeasurementProcessor_Name");
        Description = LocalizationHelper.GetString("VoidMeasurementProcessor_Description");
    }

    /// <summary>
    /// Registers the hidden polygon-ROI parameters (injected by the UI, up to
    /// 32 points) plus the void-detection settings.
    /// </summary>
    protected override void InitializeParameters()
    {
        // ── Polygon ROI (injected by the UI, max 32 points) ──
        Parameters.Add("PolyCount", new ProcessorParameter("PolyCount", "PolyCount", typeof(int), 0, null, null, "") { IsVisible = false });
        for (int i = 0; i < 32; i++)
        {
            Parameters.Add($"PolyX{i}", new ProcessorParameter($"PolyX{i}", $"PolyX{i}", typeof(int), 0, null, null, "") { IsVisible = false });
            Parameters.Add($"PolyY{i}", new ProcessorParameter($"PolyY{i}", $"PolyY{i}", typeof(int), 0, null, null, "") { IsVisible = false });
        }
        // ── Void (bubble) detection parameters ──
        Parameters.Add("MinThreshold", new ProcessorParameter(
            "MinThreshold",
            LocalizationHelper.GetString("VoidMeasurementProcessor_MinThreshold"),
            typeof(int), 128, 0, 255,
            LocalizationHelper.GetString("VoidMeasurementProcessor_MinThreshold_Desc")));
        Parameters.Add("MaxThreshold", new ProcessorParameter(
            "MaxThreshold",
            LocalizationHelper.GetString("VoidMeasurementProcessor_MaxThreshold"),
            typeof(int), 255, 0, 255,
            LocalizationHelper.GetString("VoidMeasurementProcessor_MaxThreshold_Desc")));
        Parameters.Add("MinVoidArea", new ProcessorParameter(
            "MinVoidArea",
            LocalizationHelper.GetString("VoidMeasurementProcessor_MinVoidArea"),
            typeof(int), 10, 1, 100000,
            LocalizationHelper.GetString("VoidMeasurementProcessor_MinVoidArea_Desc")));
        Parameters.Add("MergeRadius", new ProcessorParameter(
            "MergeRadius",
            LocalizationHelper.GetString("VoidMeasurementProcessor_MergeRadius"),
            typeof(int), 3, 0, 30,
            LocalizationHelper.GetString("VoidMeasurementProcessor_MergeRadius_Desc")));
        Parameters.Add("BlurSize", new ProcessorParameter(
            "BlurSize",
            LocalizationHelper.GetString("VoidMeasurementProcessor_BlurSize"),
            typeof(int), 3, 1, 31,
            LocalizationHelper.GetString("VoidMeasurementProcessor_BlurSize_Desc")));
        Parameters.Add("VoidLimit", new ProcessorParameter(
            "VoidLimit",
            LocalizationHelper.GetString("VoidMeasurementProcessor_VoidLimit"),
            typeof(double), 25.0, 0.0, 100.0,
            LocalizationHelper.GetString("VoidMeasurementProcessor_VoidLimit_Desc")));
    }

    /// <summary>
    /// Builds a filled ROI mask: the given polygon when provided, otherwise the
    /// whole image (all 255).
    /// </summary>
    private static Image<Gray, byte> CreateRoiMask(int width, int height, Point[]? polygon)
    {
        var mask = new Image<Gray, byte>(width, height);
        if (polygon == null)
        {
            mask.SetValue(new Gray(255)); // no ROI → use the full image
            return mask;
        }
        using var vop = new VectorOfPoint(polygon);
        using var vvop = new VectorOfVectorOfPoint(vop);
        CvInvoke.DrawContours(mask, vvop, 0, new MCvScalar(255), -1);
        return mask;
    }

    /// <summary>
    /// Measures the void (bubble) rate inside the ROI: blur → dual-threshold
    /// segmentation → dilation merge → contour analysis → void rate =
    /// total void area / ROI area. Publishes per-void info and PASS/FAIL via
    /// OutputData and returns an unmodified clone of the input image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int minThresh = GetParameter<int>("MinThreshold");
        int maxThresh = GetParameter<int>("MaxThreshold");
        int minVoidArea = GetParameter<int>("MinVoidArea");
        int mergeRadius = GetParameter<int>("MergeRadius");
        int blurSize = GetParameter<int>("BlurSize");
        double voidLimit = GetParameter<double>("VoidLimit");
        if (blurSize % 2 == 0) blurSize++; // GaussianBlur requires an odd kernel size
        OutputData.Clear();
        int w = inputImage.Width, h = inputImage.Height;
        // ── Polygon ROI points (null → full-image ROI) ──
        int polyCount = GetParameter<int>("PolyCount");
        Point[]? roiPoints = null;
        if (polyCount >= 3)
        {
            roiPoints = new Point[polyCount];
            for (int i = 0; i < polyCount; i++)
                roiPoints[i] = new Point(GetParameter<int>($"PolyX{i}"), GetParameter<int>($"PolyY{i}"));
        }
        // Bug fix: `using` declarations release these images even when an
        // exception is thrown mid-way; the explicit Dispose calls at the end of
        // the old implementation leaked them on the exception path.
        using Image<Gray, byte> roiMask = CreateRoiMask(w, h, roiPoints);
        int roiArea = CvInvoke.CountNonZero(roiMask);
        _logger.Debug("VoidMeasurement: ROI area={Area}, Thresh=[{Min},{Max}], MergeR={MR}",
            roiArea, minThresh, maxThresh, mergeRadius);
        // ── Gaussian blur for denoising ──
        using var blurred = new Image<Gray, byte>(w, h);
        CvInvoke.GaussianBlur(inputImage, blurred, new Size(blurSize, blurSize), 0);
        // ── Dual-threshold segmentation of voids (bright regions) inside the ROI ──
        using var voidImg = new Image<Gray, byte>(w, h);
        byte[,,] srcData = blurred.Data;
        byte[,,] dstData = voidImg.Data;
        byte[,,] maskData = roiMask.Data;
        for (int y = 0; y < h; y++)
        {
            for (int x = 0; x < w; x++)
            {
                if (maskData[y, x, 0] > 0)
                {
                    byte val = srcData[y, x, 0];
                    dstData[y, x, 0] = (val >= minThresh && val <= maxThresh) ? (byte)255 : (byte)0;
                }
            }
        }
        // ── Morphological dilation merges nearby voids ──
        if (mergeRadius > 0)
        {
            int kernelSize = mergeRadius * 2 + 1;
            using var kernel = CvInvoke.GetStructuringElement(ElementShape.Ellipse,
                new Size(kernelSize, kernelSize), new Point(-1, -1));
            CvInvoke.Dilate(voidImg, voidImg, kernel, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0));
            // Intersect with the ROI mask so dilation cannot grow outside the ROI.
            CvInvoke.BitwiseAnd(voidImg, roiMask, voidImg);
        }
        // ── Contour detection ──
        using var contours = new VectorOfVectorOfPoint();
        using var hierarchy = new Mat();
        CvInvoke.FindContours(voidImg, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
        var voids = new List<VoidRegionInfo>();
        int totalVoidArea = 0;
        for (int i = 0; i < contours.Size; i++)
        {
            double area = CvInvoke.ContourArea(contours[i]);
            if (area < minVoidArea) continue;
            var moments = CvInvoke.Moments(contours[i]);
            if (moments.M00 < 1) continue; // degenerate contour: centroid undefined
            int intArea = (int)Math.Round(area);
            totalVoidArea += intArea;
            voids.Add(new VoidRegionInfo
            {
                Index = voids.Count + 1,
                CenterX = moments.M10 / moments.M00,
                CenterY = moments.M01 / moments.M00,
                Area = intArea,
                AreaPercent = roiArea > 0 ? area / roiArea * 100.0 : 0,
                BoundingBox = CvInvoke.BoundingRectangle(contours[i]),
                ContourPoints = contours[i].ToArray()
            });
        }
        // Sort by area, largest first, then re-number.
        voids.Sort((a, b) => b.Area.CompareTo(a.Area));
        for (int i = 0; i < voids.Count; i++) voids[i].Index = i + 1;
        double voidRate = roiArea > 0 ? (double)totalVoidArea / roiArea * 100.0 : 0;
        string classification = voidRate <= voidLimit ? "PASS" : "FAIL";
        int maxVoidArea = voids.Count > 0 ? voids[0].Area : 0;
        _logger.Information("VoidMeasurement: VoidRate={Rate:F1}%, Voids={Count}, MaxArea={Max}, {Class}",
            voidRate, voids.Count, maxVoidArea, classification);
        // ── Outputs ──
        OutputData["VoidMeasurementResult"] = true;
        OutputData["RoiArea"] = roiArea;
        OutputData["RoiPoints"] = roiPoints;
        OutputData["TotalVoidArea"] = totalVoidArea;
        OutputData["VoidRate"] = voidRate;
        OutputData["VoidLimit"] = voidLimit;
        OutputData["VoidCount"] = voids.Count;
        OutputData["MaxVoidArea"] = maxVoidArea;
        OutputData["Classification"] = classification;
        OutputData["Voids"] = voids;
        OutputData["ResultText"] = $"Void: {voidRate:F1}% | {classification} | {voids.Count} voids | ROI: {roiArea}px";
        return inputImage.Clone();
    }
}
/// <summary>
/// Information about a single void (bubble) region found inside the ROI.
/// </summary>
public class VoidRegionInfo
{
    /// <summary>1-based rank after sorting by area, largest first.</summary>
    public int Index { get; set; }
    /// <summary>Centroid X in pixels (from contour moments M10/M00).</summary>
    public double CenterX { get; set; }
    /// <summary>Centroid Y in pixels (from contour moments M01/M00).</summary>
    public double CenterY { get; set; }
    /// <summary>Void area in pixels (rounded contour area).</summary>
    public int Area { get; set; }
    /// <summary>Void area as a percentage of the ROI area.</summary>
    public double AreaPercent { get; set; }
    /// <summary>Axis-aligned bounding rectangle of the void contour.</summary>
    public Rectangle BoundingBox { get; set; }
    /// <summary>Contour points outlining the void region.</summary>
    public Point[] ContourPoints { get; set; } = Array.Empty<Point>();
}
@@ -0,0 +1,197 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: BandPassFilterProcessor.cs
// 描述: 带通滤波器算子,用于频域图像处理
// 功能:
// - 在频域中保留特定频率范围的信号
// - 支持理想、巴特沃斯、高斯三种滤波器类型
// - 可调节低频和高频截止频率
// - 通过FFT实现频域滤波
// 算法: 基于离散傅里叶变换(DFT)的频域滤波
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// 带通滤波器算子
/// </summary>
public class BandPassFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<BandPassFilterProcessor>();

    /// <summary>Creates the processor with its localized name and description.</summary>
    public BandPassFilterProcessor()
    {
        Name = LocalizationHelper.GetString("BandPassFilterProcessor_Name");
        Description = LocalizationHelper.GetString("BandPassFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the cutoff frequencies, filter type (Ideal / Butterworth /
    /// Gaussian) and Butterworth order.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("LowCutoff", new ProcessorParameter(
            "LowCutoff",
            LocalizationHelper.GetString("BandPassFilterProcessor_LowCutoff"),
            typeof(int),
            10,
            1,
            200,
            LocalizationHelper.GetString("BandPassFilterProcessor_LowCutoff_Desc")));
        Parameters.Add("HighCutoff", new ProcessorParameter(
            "HighCutoff",
            LocalizationHelper.GetString("BandPassFilterProcessor_HighCutoff"),
            typeof(int),
            50,
            2,
            500,
            LocalizationHelper.GetString("BandPassFilterProcessor_HighCutoff_Desc")));
        Parameters.Add("FilterType", new ProcessorParameter(
            "FilterType",
            LocalizationHelper.GetString("BandPassFilterProcessor_FilterType"),
            typeof(string),
            "Ideal",
            null,
            null,
            LocalizationHelper.GetString("BandPassFilterProcessor_FilterType_Desc"),
            new string[] { "Ideal", "Butterworth", "Gaussian" }));
        Parameters.Add("Order", new ProcessorParameter(
            "Order",
            LocalizationHelper.GetString("BandPassFilterProcessor_Order"),
            typeof(int),
            2,
            1,
            10,
            LocalizationHelper.GetString("BandPassFilterProcessor_Order_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies a frequency-domain band-pass filter: forward DFT, per-element
    /// mask multiplication of the real and imaginary planes, inverse DFT, then
    /// min-max normalization of the spatial result back into 8-bit range.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int lowCutoff = GetParameter<int>("LowCutoff");
        int highCutoff = GetParameter<int>("HighCutoff");
        string filterType = GetParameter<string>("FilterType");
        int order = GetParameter<int>("Order");
        // Keep the pass band valid: the high cutoff must exceed the low cutoff.
        if (highCutoff <= lowCutoff)
        {
            highCutoff = lowCutoff + 10;
        }
        using var floatImage = inputImage.Convert<Gray, float>();
        using var imaginaryImage = new Image<Gray, float>(floatImage.Size);
        imaginaryImage.SetZero();
        // Pack real + imaginary planes into a 2-channel complex matrix.
        using var complexMat = new Mat();
        using (var planes = new Emgu.CV.Util.VectorOfMat())
        {
            planes.Push(floatImage.Mat);
            planes.Push(imaginaryImage.Mat);
            CvInvoke.Merge(planes, complexMat);
        }
        using var dftMat = new Mat();
        CvInvoke.Dft(complexMat, dftMat, DxtType.Forward, 0);
        // NOTE(review): the mask is centered on the image while the DFT output
        // is not quadrant-shifted; confirm whether an fftshift is intended here.
        using var mask = CreateBandPassMask(floatImage.Size, lowCutoff, highCutoff, filterType, order);
        // Multiply both spectrum planes by the mask.
        using var filteredDft = new Mat();
        using (var dftPlanes = new Emgu.CV.Util.VectorOfMat())
        {
            CvInvoke.Split(dftMat, dftPlanes);
            Mat real = dftPlanes[0];
            Mat imag = dftPlanes[1];
            CvInvoke.Multiply(real, mask.Mat, real);
            CvInvoke.Multiply(imag, mask.Mat, imag);
            using var filteredPlanes = new Emgu.CV.Util.VectorOfMat();
            filteredPlanes.Push(real);
            filteredPlanes.Push(imag);
            CvInvoke.Merge(filteredPlanes, filteredDft);
        }
        // Inverse transform back to the spatial domain (real plane only).
        using var idftMat = new Mat();
        CvInvoke.Dft(filteredDft, idftMat, DxtType.Inverse | DxtType.Scale, 0);
        using var idftPlanes = new Emgu.CV.Util.VectorOfMat();
        CvInvoke.Split(idftMat, idftPlanes);
        var result = new Image<Gray, float>(floatImage.Size);
        idftPlanes[0].CopyTo(result);
        // Normalize the result into [0, 255] before converting to 8-bit.
        double minVal = 0, maxVal = 0;
        Point minLoc = new Point();
        Point maxLoc = new Point();
        CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
        if (maxVal > minVal)
        {
            result = (result - minVal) * (255.0 / (maxVal - minVal));
        }
        _logger.Debug("Process: LowCutoff = {0}, HighCutoff = {1}, FilterType = {2}, Order = {3}", lowCutoff, highCutoff, filterType, order);
        return result.Convert<Gray, byte>();
    }

    /// <summary>
    /// Builds the centered band-pass mask for the selected filter type.
    /// Bug fix: the switch previously matched the Chinese labels
    /// "理想"/"巴特沃斯"/"高斯" while the FilterType parameter supplies
    /// "Ideal"/"Butterworth"/"Gaussian", so every selectable value fell through
    /// and produced an all-zero mask (black output). Both spellings are
    /// accepted now; unknown values fall back to the ideal filter.
    /// </summary>
    private Image<Gray, float> CreateBandPassMask(Size size, int lowCutoff, int highCutoff, string filterType, int order)
    {
        var mask = new Image<Gray, float>(size);
        int cx = size.Width / 2;
        int cy = size.Height / 2;
        for (int y = 0; y < size.Height; y++)
        {
            for (int x = 0; x < size.Width; x++)
            {
                double dx = x - cx;
                double dy = y - cy;
                double distance = Math.Sqrt(dx * dx + dy * dy);
                float value;
                switch (filterType)
                {
                    case "Butterworth":
                    case "巴特沃斯":
                        // Cascaded high-pass × low-pass Butterworth responses.
                        double highPass = 1.0 / (1.0 + Math.Pow(lowCutoff / (distance + 0.001), 2 * order));
                        double lowPass = 1.0 / (1.0 + Math.Pow(distance / (highCutoff + 0.001), 2 * order));
                        value = (float)(highPass * lowPass);
                        break;
                    case "Gaussian":
                    case "高斯":
                        double highPassGaussian = 1.0 - Math.Exp(-distance * distance / (2.0 * lowCutoff * lowCutoff));
                        double lowPassGaussian = Math.Exp(-distance * distance / (2.0 * highCutoff * highCutoff));
                        value = (float)(highPassGaussian * lowPassGaussian);
                        break;
                    case "Ideal":
                    case "理想":
                    default:
                        // Hard annulus: pass frequencies within [low, high].
                        value = (distance >= lowCutoff && distance <= highCutoff) ? 1.0f : 0.0f;
                        break;
                }
                mask.Data[y, x, 0] = value;
            }
        }
        return mask;
    }
}
@@ -0,0 +1,78 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: BilateralFilterProcessor.cs
// 描述: 双边滤波算子,用于保边降噪
// 功能:
// - 双边滤波
// - 保持边缘清晰的同时平滑图像
// - 可调节核大小和标准差
// 算法: 双边滤波
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// 双边滤波算子
/// </summary>
public class BilateralFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<BilateralFilterProcessor>();

    /// <summary>Creates the processor with its localized name and description.</summary>
    public BilateralFilterProcessor()
    {
        Name = LocalizationHelper.GetString("BilateralFilterProcessor_Name");
        Description = LocalizationHelper.GetString("BilateralFilterProcessor_Description");
    }

    /// <summary>Registers the filter diameter and the two sigma parameters.</summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Diameter", new ProcessorParameter(
            "Diameter", LocalizationHelper.GetString("BilateralFilterProcessor_Diameter"),
            typeof(int), 9, 1, 31,
            LocalizationHelper.GetString("BilateralFilterProcessor_Diameter_Desc")));
        Parameters.Add("SigmaColor", new ProcessorParameter(
            "SigmaColor", LocalizationHelper.GetString("BilateralFilterProcessor_SigmaColor"),
            typeof(double), 75.0, 1.0, 200.0,
            LocalizationHelper.GetString("BilateralFilterProcessor_SigmaColor_Desc")));
        Parameters.Add("SigmaSpace", new ProcessorParameter(
            "SigmaSpace", LocalizationHelper.GetString("BilateralFilterProcessor_SigmaSpace"),
            typeof(double), 75.0, 1.0, 200.0,
            LocalizationHelper.GetString("BilateralFilterProcessor_SigmaSpace_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Runs an edge-preserving bilateral filter and returns the filtered copy;
    /// the input image itself is left untouched.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int diameter = GetParameter<int>("Diameter");
        double sigmaColor = GetParameter<double>("SigmaColor");
        double sigmaSpace = GetParameter<double>("SigmaSpace");
        // Destination must be a separate buffer: bilateral filtering is not in-place.
        var filtered = inputImage.Clone();
        CvInvoke.BilateralFilter(inputImage, filtered, diameter, sigmaColor, sigmaSpace);
        _logger.Debug("Process: Diameter = {Diameter}, SigmaColor = {SigmaColor}, SigmaSpace = {SigmaSpace}",
            diameter, sigmaColor, sigmaSpace);
        return filtered;
    }
}
@@ -0,0 +1,69 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: GaussianBlurProcessor.cs
// 描述: 高斯模糊算子,用于图像平滑和降噪
// 功能:
// - 高斯核卷积平滑
// - 可调节核大小和标准差
// - 有效去除高斯噪声
// - 保持边缘相对清晰
// 算法: 高斯滤波器卷积
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// 高斯模糊算子 — Gaussian blur for smoothing and noise reduction.
/// </summary>
public class GaussianBlurProcessor : ImageProcessorBase
{
    // Fixed: the logger was previously created with Log.ForContext<GammaProcessor>(),
    // which stamped this class's log entries with the wrong source context.
    private static readonly ILogger _logger = Log.ForContext<GaussianBlurProcessor>();

    public GaussianBlurProcessor()
    {
        Name = LocalizationHelper.GetString("GaussianBlurProcessor_Name");
        Description = LocalizationHelper.GetString("GaussianBlurProcessor_Description");
    }

    /// <summary>
    /// Registers the kernel-size (default 5, range 1-31) and sigma
    /// (default 1.5, range 0.1-10.0) parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("GaussianBlurProcessor_KernelSize"),
            typeof(int),
            5,
            1,
            31,
            LocalizationHelper.GetString("GaussianBlurProcessor_KernelSize_Desc")));
        Parameters.Add("Sigma", new ProcessorParameter(
            "Sigma",
            LocalizationHelper.GetString("GaussianBlurProcessor_Sigma"),
            typeof(double),
            1.5,
            0.1,
            10.0,
            LocalizationHelper.GetString("GaussianBlurProcessor_Sigma_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies a Gaussian blur to the input image.
    /// </summary>
    /// <param name="inputImage">8-bit single-channel source image (not modified).</param>
    /// <returns>A new blurred image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int kernelSize = GetParameter<int>("KernelSize");
        double sigma = GetParameter<double>("Sigma");
        // GaussianBlur requires an odd aperture; round even sizes up.
        if (kernelSize % 2 == 0) kernelSize++;
        var result = inputImage.Clone();
        CvInvoke.GaussianBlur(inputImage, result,
            new System.Drawing.Size(kernelSize, kernelSize), sigma);
        _logger.Debug("Process: KernelSize = {KernelSize}, Sigma = {Sigma}", kernelSize, sigma);
        return result;
    }
}
@@ -0,0 +1,148 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: HighPassFilterProcessor.cs
// 描述: 高通滤波算子,用于边缘增强
// 功能:
// - 高通滤波(频域)
// - 边缘增强
// - 去除低频信息
// - 可调节截止频率
// 算法: 高斯高通滤波器(频域)
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// 高通滤波算子 — frequency-domain high-pass filter (Gaussian transfer function).
/// Removes low-frequency content to emphasize edges and fine detail.
/// </summary>
public class HighPassFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<HighPassFilterProcessor>();

    public HighPassFilterProcessor()
    {
        Name = LocalizationHelper.GetString("HighPassFilterProcessor_Name");
        Description = LocalizationHelper.GetString("HighPassFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the single cutoff-frequency parameter (default 30.0, range 1.0-200.0).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("CutoffFrequency", new ProcessorParameter(
            "CutoffFrequency",
            LocalizationHelper.GetString("HighPassFilterProcessor_CutoffFrequency"),
            typeof(double),
            30.0,
            1.0,
            200.0,
            LocalizationHelper.GetString("HighPassFilterProcessor_CutoffFrequency_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// High-pass filters the image in the frequency domain:
    /// forward DFT -> multiply both spectrum planes by a Gaussian high-pass
    /// mask -> inverse scaled DFT -> convert the real plane back to 8-bit.
    /// </summary>
    /// <param name="inputImage">8-bit single-channel source image.</param>
    /// <returns>A new filtered 8-bit image.</returns>
    /// <remarks>
    /// NOTE(review): the DFT output is not quadrant-swapped (no fftshift), yet
    /// CreateHighPassFilter centers its mask at (rows/2, cols/2). In the
    /// unshifted spectrum low frequencies sit at the corners, so the mask
    /// geometry may not match the spectrum layout — confirm intended behavior.
    /// NOTE(review): the Mats below are disposed only on the success path; an
    /// exception mid-pipeline leaks their unmanaged buffers (no using blocks).
    /// </remarks>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double cutoffFrequency = GetParameter<double>("CutoffFrequency");
        int rows = inputImage.Rows;
        int cols = inputImage.Cols;
        // Convert to 32-bit float, the input depth the DFT operates on.
        Image<Gray, float> floatImage = inputImage.Convert<Gray, float>();
        // Build a two-plane (real, imaginary-zero) image for the forward DFT.
        Mat complexImage = new Mat();
        using (var planes = new Emgu.CV.Util.VectorOfMat())
        {
            planes.Push(floatImage.Mat);
            planes.Push(Mat.Zeros(rows, cols, DepthType.Cv32F, 1));
            CvInvoke.Merge(planes, complexImage);
        }
        // Forward DFT.
        Mat dftImage = new Mat();
        CvInvoke.Dft(complexImage, dftImage, DxtType.Forward);
        // Split the spectrum into real and imaginary planes.
        using (var dftPlanes = new Emgu.CV.Util.VectorOfMat())
        {
            CvInvoke.Split(dftImage, dftPlanes);
            Mat real = dftPlanes[0];
            Mat imag = dftPlanes[1];
            // Build the Gaussian high-pass transfer function.
            Mat filter = CreateHighPassFilter(rows, cols, cutoffFrequency);
            // Apply the mask to both planes (element-wise, in place).
            CvInvoke.Multiply(real, filter, real);
            CvInvoke.Multiply(imag, filter, imag);
            // Re-merge the filtered planes and run the inverse, scaled DFT.
            using (var filteredPlanes = new Emgu.CV.Util.VectorOfMat())
            {
                filteredPlanes.Push(real);
                filteredPlanes.Push(imag);
                Mat filteredDft = new Mat();
                CvInvoke.Merge(filteredPlanes, filteredDft);
                Mat ifftImage = new Mat();
                CvInvoke.Dft(filteredDft, ifftImage, DxtType.Inverse | DxtType.Scale);
                // Keep only the real plane of the inverse transform.
                using (var ifftPlanes = new Emgu.CV.Util.VectorOfMat())
                {
                    CvInvoke.Split(ifftImage, ifftPlanes);
                    // Convert back to 8-bit for the pipeline's Image<Gray, byte> contract.
                    Mat resultMat = new Mat();
                    ifftPlanes[0].ConvertTo(resultMat, DepthType.Cv8U);
                    Image<Gray, byte> result = resultMat.ToImage<Gray, byte>();
                    // Release intermediate unmanaged buffers (success path only).
                    floatImage.Dispose();
                    complexImage.Dispose();
                    dftImage.Dispose();
                    filter.Dispose();
                    filteredDft.Dispose();
                    ifftImage.Dispose();
                    resultMat.Dispose();
                    _logger.Debug("Process: CutoffFrequency = {CutoffFrequency}", cutoffFrequency);
                    return result;
                }
            }
        }
    }

    /// <summary>
    /// Creates a Gaussian high-pass transfer function H(u,v) = 1 - exp(-D^2 / (2*d0^2)),
    /// where D is the distance from the image center and d0 the cutoff frequency.
    /// Response is 0 at the center and approaches 1 far from it.
    /// </summary>
    private Mat CreateHighPassFilter(int rows, int cols, double d0)
    {
        var filter = new Image<Gray, float>(cols, rows);
        int centerX = cols / 2;
        int centerY = rows / 2;
        for (int i = 0; i < rows; i++)
        {
            for (int j = 0; j < cols; j++)
            {
                double distance = Math.Sqrt(Math.Pow(i - centerY, 2) + Math.Pow(j - centerX, 2));
                float value = (float)(1 - Math.Exp(-(distance * distance) / (2 * d0 * d0)));
                filter.Data[i, j, 0] = value;
            }
        }
        return filter.Mat;
    }
}
@@ -0,0 +1,148 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: LowPassFilterProcessor.cs
// 描述: 低通滤波算子,用于去除高频噪声
// 功能:
// - 低通滤波(频域)
// - 去除高频噪声
// - 平滑图像
// - 可调节截止频率
// 算法: 高斯低通滤波器(频域)
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// 低通滤波算子 — frequency-domain low-pass filter (Gaussian transfer function).
/// Suppresses high-frequency content to smooth the image and reduce noise.
/// </summary>
public class LowPassFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<LowPassFilterProcessor>();

    public LowPassFilterProcessor()
    {
        Name = LocalizationHelper.GetString("LowPassFilterProcessor_Name");
        Description = LocalizationHelper.GetString("LowPassFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the single cutoff-frequency parameter (default 30.0, range 1.0-200.0).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("CutoffFrequency", new ProcessorParameter(
            "CutoffFrequency",
            LocalizationHelper.GetString("LowPassFilterProcessor_CutoffFrequency"),
            typeof(double),
            30.0,
            1.0,
            200.0,
            LocalizationHelper.GetString("LowPassFilterProcessor_CutoffFrequency_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Low-pass filters the image in the frequency domain:
    /// forward DFT -> multiply both spectrum planes by a Gaussian low-pass
    /// mask -> inverse scaled DFT -> convert the real plane back to 8-bit.
    /// </summary>
    /// <param name="inputImage">8-bit single-channel source image.</param>
    /// <returns>A new filtered 8-bit image.</returns>
    /// <remarks>
    /// NOTE(review): the DFT output is not quadrant-swapped (no fftshift), yet
    /// CreateLowPassFilter centers its mask at (rows/2, cols/2). In the
    /// unshifted spectrum low frequencies sit at the corners, so the mask
    /// geometry may not match the spectrum layout — confirm intended behavior.
    /// NOTE(review): the Mats below are disposed only on the success path; an
    /// exception mid-pipeline leaks their unmanaged buffers (no using blocks).
    /// </remarks>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double cutoffFrequency = GetParameter<double>("CutoffFrequency");
        int rows = inputImage.Rows;
        int cols = inputImage.Cols;
        // Convert to 32-bit float, the input depth the DFT operates on.
        Image<Gray, float> floatImage = inputImage.Convert<Gray, float>();
        // Build a two-plane (real, imaginary-zero) image for the forward DFT.
        Mat complexImage = new Mat();
        using (var planes = new Emgu.CV.Util.VectorOfMat())
        {
            planes.Push(floatImage.Mat);
            planes.Push(Mat.Zeros(rows, cols, DepthType.Cv32F, 1));
            CvInvoke.Merge(planes, complexImage);
        }
        // Forward DFT.
        Mat dftImage = new Mat();
        CvInvoke.Dft(complexImage, dftImage, DxtType.Forward);
        // Split the spectrum into real and imaginary planes.
        using (var dftPlanes = new Emgu.CV.Util.VectorOfMat())
        {
            CvInvoke.Split(dftImage, dftPlanes);
            Mat real = dftPlanes[0];
            Mat imag = dftPlanes[1];
            // Build the Gaussian low-pass transfer function.
            Mat filter = CreateLowPassFilter(rows, cols, cutoffFrequency);
            // Apply the mask to both planes (element-wise, in place).
            CvInvoke.Multiply(real, filter, real);
            CvInvoke.Multiply(imag, filter, imag);
            // Re-merge the filtered planes and run the inverse, scaled DFT.
            using (var filteredPlanes = new Emgu.CV.Util.VectorOfMat())
            {
                filteredPlanes.Push(real);
                filteredPlanes.Push(imag);
                Mat filteredDft = new Mat();
                CvInvoke.Merge(filteredPlanes, filteredDft);
                Mat ifftImage = new Mat();
                CvInvoke.Dft(filteredDft, ifftImage, DxtType.Inverse | DxtType.Scale);
                // Keep only the real plane of the inverse transform.
                using (var ifftPlanes = new Emgu.CV.Util.VectorOfMat())
                {
                    CvInvoke.Split(ifftImage, ifftPlanes);
                    // Convert back to 8-bit for the pipeline's Image<Gray, byte> contract.
                    Mat resultMat = new Mat();
                    ifftPlanes[0].ConvertTo(resultMat, DepthType.Cv8U);
                    Image<Gray, byte> result = resultMat.ToImage<Gray, byte>();
                    // Release intermediate unmanaged buffers (success path only).
                    floatImage.Dispose();
                    complexImage.Dispose();
                    dftImage.Dispose();
                    filter.Dispose();
                    filteredDft.Dispose();
                    ifftImage.Dispose();
                    resultMat.Dispose();
                    _logger.Debug("Process: CutoffFrequency = {CutoffFrequency}", cutoffFrequency);
                    return result;
                }
            }
        }
    }

    /// <summary>
    /// Creates a Gaussian low-pass transfer function H(u,v) = exp(-D^2 / (2*d0^2)),
    /// where D is the distance from the image center and d0 the cutoff frequency.
    /// Response is 1 at the center and decays toward 0 far from it.
    /// </summary>
    private Mat CreateLowPassFilter(int rows, int cols, double d0)
    {
        var filter = new Image<Gray, float>(cols, rows);
        int centerX = cols / 2;
        int centerY = rows / 2;
        for (int i = 0; i < rows; i++)
        {
            for (int j = 0; j < cols; j++)
            {
                double distance = Math.Sqrt(Math.Pow(i - centerY, 2) + Math.Pow(j - centerX, 2));
                float value = (float)Math.Exp(-(distance * distance) / (2 * d0 * d0));
                filter.Data[i, j, 0] = value;
            }
        }
        return filter.Mat;
    }
}
@@ -0,0 +1,61 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: MeanFilterProcessor.cs
// 描述: 均值滤波算子,用于图像平滑
// 功能:
// - 均值滤波
// - 简单快速的平滑方法
// - 可调节核大小
// 算法: 均值滤波
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
using System.Drawing;
namespace ImageProcessing.Processors;
/// <summary>
/// 均值滤波算子 — normalized box (mean) filter for simple smoothing.
/// </summary>
public class MeanFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MeanFilterProcessor>();

    public MeanFilterProcessor()
    {
        Name = LocalizationHelper.GetString("MeanFilterProcessor_Name");
        Description = LocalizationHelper.GetString("MeanFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the single kernel-size parameter (default 5, range 1-31).
    /// </summary>
    protected override void InitializeParameters()
    {
        var kernelSizeParameter = new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("MeanFilterProcessor_KernelSize"),
            typeof(int),
            5,
            1,
            31,
            LocalizationHelper.GetString("MeanFilterProcessor_KernelSize_Desc"));
        Parameters.Add("KernelSize", kernelSizeParameter);

        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Smooths the image with a normalized box filter.
    /// </summary>
    /// <param name="inputImage">8-bit single-channel source image (not modified).</param>
    /// <returns>A new smoothed image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int aperture = GetParameter<int>("KernelSize");
        // Keep the aperture odd so the kernel has a well-defined center.
        if (aperture % 2 == 0)
        {
            aperture++;
        }

        var smoothed = inputImage.Clone();
        CvInvoke.Blur(inputImage, smoothed, new Size(aperture, aperture), new Point(-1, -1));

        _logger.Debug("Process: KernelSize = {KernelSize}", aperture);
        return smoothed;
    }
}
@@ -0,0 +1,61 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: MedianFilterProcessor.cs
// 描述: 中值滤波算子,用于去除椒盐噪声
// 功能:
// - 中值滤波
// - 有效去除椒盐噪声
// - 保持边缘清晰
// - 可调节核大小
// 算法: 中值滤波
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// 中值滤波算子 — median filter, effective against salt-and-pepper noise
/// while keeping edges sharp.
/// </summary>
public class MedianFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MedianFilterProcessor>();

    public MedianFilterProcessor()
    {
        Name = LocalizationHelper.GetString("MedianFilterProcessor_Name");
        Description = LocalizationHelper.GetString("MedianFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the single kernel-size parameter (default 5, range 1-31).
    /// </summary>
    protected override void InitializeParameters()
    {
        var kernelSizeParameter = new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("MedianFilterProcessor_KernelSize"),
            typeof(int),
            5,
            1,
            31,
            LocalizationHelper.GetString("MedianFilterProcessor_KernelSize_Desc"));
        Parameters.Add("KernelSize", kernelSizeParameter);

        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies a median filter to the input image.
    /// </summary>
    /// <param name="inputImage">8-bit single-channel source image (not modified).</param>
    /// <returns>A new filtered image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int aperture = GetParameter<int>("KernelSize");
        // MedianBlur requires an odd aperture; bump even values to the next odd size.
        if (aperture % 2 == 0)
        {
            aperture++;
        }

        var filtered = inputImage.Clone();
        CvInvoke.MedianBlur(inputImage, filtered, aperture);

        _logger.Debug("Process: KernelSize = {KernelSize}", aperture);
        return filtered;
    }
}
@@ -0,0 +1,123 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: ShockFilterProcessor.cs
// 描述: 冲击滤波算子,用于图像锐化和边缘增强
// 功能:
// - 基于PDE的图像锐化
// - 增强边缘同时保持平滑区域
// - 可调节迭代次数和滤波强度
// - 适用于模糊图像的恢复
// 算法: 冲击滤波器(Shock Filter)基于偏微分方程
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// 冲击滤波算子 — PDE-based shock filter for sharpening / edge enhancement.
/// </summary>
public class ShockFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ShockFilterProcessor>();

    public ShockFilterProcessor()
    {
        Name = LocalizationHelper.GetString("ShockFilterProcessor_Name");
        Description = LocalizationHelper.GetString("ShockFilterProcessor_Description");
    }

    /// <summary>
    /// Registers iteration count (1-20), the gradient threshold Theta (0.0-2.0)
    /// and the time step Dt (0.1-1.0).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Iterations", new ProcessorParameter(
            "Iterations",
            LocalizationHelper.GetString("ShockFilterProcessor_Iterations"),
            typeof(int),
            5,
            1,
            20,
            LocalizationHelper.GetString("ShockFilterProcessor_Iterations_Desc")));
        Parameters.Add("Theta", new ProcessorParameter(
            "Theta",
            LocalizationHelper.GetString("ShockFilterProcessor_Theta"),
            typeof(double),
            0.5,
            0.0,
            2.0,
            LocalizationHelper.GetString("ShockFilterProcessor_Theta_Desc")));
        Parameters.Add("Dt", new ProcessorParameter(
            "Dt",
            LocalizationHelper.GetString("ShockFilterProcessor_Dt"),
            typeof(double),
            0.25,
            0.1,
            1.0,
            LocalizationHelper.GetString("ShockFilterProcessor_Dt_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Runs the configured number of shock-filter iterations and converts the
    /// result back to 8-bit.
    /// </summary>
    /// <param name="inputImage">8-bit single-channel source image (not modified).</param>
    /// <returns>A new sharpened 8-bit image.</returns>
    /// <remarks>
    /// Fixed: each iteration previously abandoned the prior float image without
    /// disposing it, leaking one unmanaged buffer per iteration. Superseded
    /// images are now disposed as soon as the next iteration's result exists.
    /// </remarks>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int iterations = GetParameter<int>("Iterations");
        double theta = GetParameter<double>("Theta");
        double dt = GetParameter<double>("Dt");

        var current = inputImage.Convert<Gray, float>();
        for (int iter = 0; iter < iterations; iter++)
        {
            var next = ShockFilterIteration(current, theta, dt);
            current.Dispose(); // release the superseded iteration buffer
            current = next;
        }

        _logger.Debug("Process: Iterations = {Iterations}, Theta = {Theta}, Dt = {Dt}", iterations, theta, dt);
        var output = current.Convert<Gray, byte>();
        current.Dispose();
        return output;
    }

    /// <summary>
    /// One explicit Euler step of the shock-filter PDE
    /// u_t = -sign(Laplacian(u)) * |grad(u)| on interior pixels.
    /// Pixels whose gradient magnitude does not exceed <paramref name="theta"/>,
    /// and all border pixels, are copied through unchanged.
    /// </summary>
    private Image<Gray, float> ShockFilterIteration(Image<Gray, float> input, double theta, double dt)
    {
        int width = input.Width;
        int height = input.Height;
        var output = new Image<Gray, float>(width, height);
        for (int y = 1; y < height - 1; y++)
        {
            for (int x = 1; x < width - 1; x++)
            {
                // Central differences for the first derivatives.
                float dx = (input.Data[y, x + 1, 0] - input.Data[y, x - 1, 0]) / 2.0f;
                float dy = (input.Data[y + 1, x, 0] - input.Data[y - 1, x, 0]) / 2.0f;
                float gradMag = (float)Math.Sqrt(dx * dx + dy * dy);
                // Second derivatives for the Laplacian, which decides the
                // direction of the shock (dilation vs. erosion side of the edge).
                float dxx = input.Data[y, x + 1, 0] - 2 * input.Data[y, x, 0] + input.Data[y, x - 1, 0];
                float dyy = input.Data[y + 1, x, 0] - 2 * input.Data[y, x, 0] + input.Data[y - 1, x, 0];
                float laplacian = dxx + dyy;
                float sign = laplacian > 0 ? 1.0f : -1.0f;
                if (gradMag > theta)
                {
                    output.Data[y, x, 0] = input.Data[y, x, 0] - (float)(dt * sign * gradMag);
                }
                else
                {
                    output.Data[y, x, 0] = input.Data[y, x, 0];
                }
            }
        }
        // Copy the one-pixel border untouched (no valid central difference there).
        for (int x = 0; x < width; x++)
        {
            output.Data[0, x, 0] = input.Data[0, x, 0];
            output.Data[height - 1, x, 0] = input.Data[height - 1, x, 0];
        }
        for (int y = 0; y < height; y++)
        {
            output.Data[y, 0, 0] = input.Data[y, 0, 0];
            output.Data[y, width - 1, 0] = input.Data[y, width - 1, 0];
        }
        return output;
    }
}
@@ -0,0 +1,199 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: HorizontalEdgeProcessor.cs
// 描述: 水平边缘检测算子,专门用于检测水平方向的边缘
// 功能:
// - 检测水平边缘
// - 支持Prewitt和Sobel算子
// - 可调节检测灵敏度
// - 适用于检测水平线条和纹理
// 算法: Prewitt/Sobel水平算子
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// 水平边缘检测算子 — detects horizontal edges (horizontal lines and textures)
/// using a Sobel, Prewitt, or simple vertical-difference operator.
/// </summary>
public class HorizontalEdgeProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<HorizontalEdgeProcessor>();

    public HorizontalEdgeProcessor()
    {
        Name = LocalizationHelper.GetString("HorizontalEdgeProcessor_Name");
        Description = LocalizationHelper.GetString("HorizontalEdgeProcessor_Description");
    }

    /// <summary>
    /// Registers the detection method ("Sobel"/"Prewitt"/"Simple"), sensitivity
    /// multiplier (0.1-5.0) and output threshold (0-255).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Method"),
            typeof(string),
            "Sobel",
            null,
            null,
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Method_Desc"),
            new string[] { "Sobel", "Prewitt", "Simple" }));
        Parameters.Add("Sensitivity", new ProcessorParameter(
            "Sensitivity",
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Sensitivity"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Sensitivity_Desc")));
        Parameters.Add("Threshold", new ProcessorParameter(
            "Threshold",
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Threshold"),
            typeof(int),
            20,
            0,
            255,
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Threshold_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Dispatches to the selected horizontal-edge operator.
    /// </summary>
    /// <param name="inputImage">8-bit single-channel source image (not modified).</param>
    /// <returns>A new edge-response image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double sensitivity = GetParameter<double>("Sensitivity");
        int threshold = GetParameter<int>("Threshold");
        Image<Gray, byte> result;
        if (method == "Sobel")
        {
            result = ApplySobel(inputImage, sensitivity, threshold);
        }
        else if (method == "Prewitt")
        {
            result = ApplyPrewitt(inputImage, sensitivity, threshold);
        }
        else // Simple
        {
            result = ApplySimple(inputImage, sensitivity, threshold);
        }
        _logger.Debug("Process: Method = {Method}, Sensitivity = {Sensitivity}, Threshold = {Threshold}",
            method, sensitivity, threshold);
        return result;
    }

    /// <summary>
    /// Sobel y-derivative (horizontal edges), scaled by sensitivity and then
    /// binarized at the threshold.
    /// </summary>
    /// <remarks>
    /// Fixed: the original applied a second Threshold(ToZero, thresh=0) after
    /// the Binary threshold, which is a no-op on an already-binary image; the
    /// redundant call was removed.
    /// NOTE(review): this branch produces a 0/255 binary map while the Prewitt
    /// and Simple branches keep the scaled magnitude above threshold — confirm
    /// whether the binarization here is intentional.
    /// </remarks>
    private Image<Gray, byte> ApplySobel(Image<Gray, byte> inputImage, double sensitivity, int threshold)
    {
        // Y-direction derivative responds to horizontal edges.
        Image<Gray, float> sobelY = new Image<Gray, float>(inputImage.Size);
        CvInvoke.Sobel(inputImage, sobelY, DepthType.Cv32F, 0, 1, 3);
        // Absolute value scaled by the sensitivity factor.
        Image<Gray, byte> result = new Image<Gray, byte>(inputImage.Size);
        CvInvoke.ConvertScaleAbs(sobelY, result, sensitivity, 0);
        if (threshold > 0)
        {
            CvInvoke.Threshold(result, result, threshold, 255, ThresholdType.Binary);
        }
        sobelY.Dispose();
        return result;
    }

    /// <summary>
    /// Prewitt horizontal-edge operator:
    ///   [ 1  1  1]
    ///   [ 0  0  0]
    ///   [-1 -1 -1]
    /// applied to interior pixels; responses above the threshold keep their
    /// scaled magnitude (clamped to 255), others are zeroed.
    /// </summary>
    private Image<Gray, byte> ApplyPrewitt(Image<Gray, byte> inputImage, double sensitivity, int threshold)
    {
        int width = inputImage.Width;
        int height = inputImage.Height;
        byte[,,] inputData = inputImage.Data;
        Image<Gray, byte> result = new Image<Gray, byte>(width, height);
        byte[,,] outputData = result.Data;
        for (int y = 1; y < height - 1; y++)
        {
            for (int x = 1; x < width - 1; x++)
            {
                int sum = 0;
                // Top row of the kernel (+1 weights).
                sum += inputData[y - 1, x - 1, 0];
                sum += inputData[y - 1, x, 0];
                sum += inputData[y - 1, x + 1, 0];
                // Bottom row of the kernel (-1 weights).
                sum -= inputData[y + 1, x - 1, 0];
                sum -= inputData[y + 1, x, 0];
                sum -= inputData[y + 1, x + 1, 0];
                // Magnitude scaled by the sensitivity factor.
                int value = (int)(Math.Abs(sum) * sensitivity);
                if (value > threshold)
                {
                    outputData[y, x, 0] = (byte)Math.Min(255, value);
                }
                else
                {
                    outputData[y, x, 0] = 0;
                }
            }
        }
        return result;
    }

    /// <summary>
    /// Simple vertical central difference |I(y-1,x) - I(y+1,x)| per column,
    /// scaled by sensitivity; responses above the threshold keep their value
    /// (clamped to 255), others are zeroed.
    /// </summary>
    private Image<Gray, byte> ApplySimple(Image<Gray, byte> inputImage, double sensitivity, int threshold)
    {
        int width = inputImage.Width;
        int height = inputImage.Height;
        byte[,,] inputData = inputImage.Data;
        Image<Gray, byte> result = new Image<Gray, byte>(width, height);
        byte[,,] outputData = result.Data;
        for (int y = 1; y < height - 1; y++)
        {
            for (int x = 0; x < width; x++)
            {
                // Difference between the pixel above and the pixel below.
                int diff = inputData[y - 1, x, 0] - inputData[y + 1, x, 0];
                int value = (int)(Math.Abs(diff) * sensitivity);
                if (value > threshold)
                {
                    outputData[y, x, 0] = (byte)Math.Min(255, value);
                }
                else
                {
                    outputData[y, x, 0] = 0;
                }
            }
        }
        return result;
    }
}
@@ -0,0 +1,133 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: KirschEdgeProcessor.cs
// 描述: Kirsch边缘检测算子,用于检测图像边缘
// 功能:
// - Kirsch算子边缘检测
// - 8个方向的边缘检测
// - 输出最大响应方向的边缘
// - 对噪声敏感度低
// 算法: Kirsch算子(8方向模板)
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Kirsch边缘检测算子 — Kirsch compass edge detector: convolves each pixel
/// with eight directional 3x3 templates and keeps the maximum response.
/// </summary>
public class KirschEdgeProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<KirschEdgeProcessor>();

    // The eight Kirsch compass templates (N, NW, W, SW, S, SE, E, NE).
    private static readonly int[][,] KirschKernels = new int[8][,]
    {
        // N
        new int[,] { { 5, 5, 5 }, { -3, 0, -3 }, { -3, -3, -3 } },
        // NW
        new int[,] { { 5, 5, -3 }, { 5, 0, -3 }, { -3, -3, -3 } },
        // W
        new int[,] { { 5, -3, -3 }, { 5, 0, -3 }, { 5, -3, -3 } },
        // SW
        new int[,] { { -3, -3, -3 }, { 5, 0, -3 }, { 5, 5, -3 } },
        // S
        new int[,] { { -3, -3, -3 }, { -3, 0, -3 }, { 5, 5, 5 } },
        // SE
        new int[,] { { -3, -3, -3 }, { -3, 0, 5 }, { -3, 5, 5 } },
        // E
        new int[,] { { -3, -3, 5 }, { -3, 0, 5 }, { -3, -3, 5 } },
        // NE
        new int[,] { { -3, 5, 5 }, { -3, 0, 5 }, { -3, -3, -3 } }
    };

    public KirschEdgeProcessor()
    {
        Name = LocalizationHelper.GetString("KirschEdgeProcessor_Name");
        Description = LocalizationHelper.GetString("KirschEdgeProcessor_Description");
    }

    /// <summary>
    /// Registers the response threshold (0-1000) and output scale (0.1-5.0).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Threshold", new ProcessorParameter(
            "Threshold",
            LocalizationHelper.GetString("KirschEdgeProcessor_Threshold"),
            typeof(int),
            100,
            0,
            1000,
            LocalizationHelper.GetString("KirschEdgeProcessor_Threshold_Desc")));
        Parameters.Add("Scale", new ProcessorParameter(
            "Scale",
            LocalizationHelper.GetString("KirschEdgeProcessor_Scale"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("KirschEdgeProcessor_Scale_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Computes the Kirsch edge map: for each interior pixel, the maximum
    /// absolute response over the eight templates, thresholded and scaled.
    /// Border pixels remain zero.
    /// </summary>
    /// <param name="inputImage">8-bit single-channel source image (not modified).</param>
    /// <returns>A new edge-response image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int threshold = GetParameter<int>("Threshold");
        double scale = GetParameter<double>("Scale");

        int width = inputImage.Width;
        int height = inputImage.Height;
        byte[,,] src = inputImage.Data;
        var edges = new Image<Gray, byte>(width, height);
        byte[,,] dst = edges.Data;

        // Slide a 3x3 window over every interior pixel, keeping the strongest
        // absolute response among the eight directional templates.
        for (int row = 1; row < height - 1; row++)
        {
            for (int col = 1; col < width - 1; col++)
            {
                int best = 0;
                foreach (var kernel in KirschKernels)
                {
                    int response = 0;
                    for (int dy = -1; dy <= 1; dy++)
                    {
                        for (int dx = -1; dx <= 1; dx++)
                        {
                            response += src[row + dy, col + dx, 0] * kernel[dy + 1, dx + 1];
                        }
                    }
                    response = Math.Abs(response);
                    if (response > best)
                    {
                        best = response;
                    }
                }

                // Threshold, then scale and clamp to the byte range.
                if (best > threshold)
                {
                    int scaledValue = (int)(best * scale);
                    dst[row, col, 0] = (byte)Math.Min(255, Math.Max(0, scaledValue));
                }
                else
                {
                    dst[row, col, 0] = 0;
                }
            }
        }

        _logger.Debug("Process: Threshold = {Threshold}, Scale = {Scale}", threshold, scale);
        return edges;
    }
}
@@ -0,0 +1,135 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件名: SobelEdgeProcessor.cs
// 描述: Sobel边缘检测算子,用于检测图像边缘
// 功能:
// - Sobel算子边缘检测
// - 支持X方向、Y方向和组合检测
// - 可调节核大小
// - 输出边缘强度图
// 算法: Sobel算子
// 作者: 李伟 wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using ImageProcessing.Core;
using Serilog;
namespace ImageProcessing.Processors;
/// <summary>
/// Sobel边缘检测算子 — Sobel edge detector supporting x-derivative,
/// y-derivative, or combined gradient-magnitude output.
/// </summary>
public class SobelEdgeProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SobelEdgeProcessor>();

    public SobelEdgeProcessor()
    {
        Name = LocalizationHelper.GetString("SobelEdgeProcessor_Name");
        Description = LocalizationHelper.GetString("SobelEdgeProcessor_Description");
    }

    /// <summary>
    /// Registers direction ("Both"/"Horizontal"/"Vertical"), kernel size (1-7,
    /// forced odd) and output scale (0.1-5.0) parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Direction", new ProcessorParameter(
            "Direction",
            LocalizationHelper.GetString("SobelEdgeProcessor_Direction"),
            typeof(string),
            "Both",
            null,
            null,
            LocalizationHelper.GetString("SobelEdgeProcessor_Direction_Desc"),
            new string[] { "Both", "Horizontal", "Vertical" }));
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("SobelEdgeProcessor_KernelSize"),
            typeof(int),
            3,
            1,
            7,
            LocalizationHelper.GetString("SobelEdgeProcessor_KernelSize_Desc")));
        Parameters.Add("Scale", new ProcessorParameter(
            "Scale",
            LocalizationHelper.GetString("SobelEdgeProcessor_Scale"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("SobelEdgeProcessor_Scale_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Computes a Sobel edge-strength image for the selected direction.
    /// </summary>
    /// <param name="inputImage">8-bit single-channel source image (not modified).</param>
    /// <returns>A new edge-strength image.</returns>
    /// <remarks>
    /// Fixed: the previous version pre-allocated the result image and then
    /// replaced the reference inside the "Both" branch, leaking the initial
    /// image's unmanaged buffer. The result is now allocated once per branch.
    /// NOTE(review): "Horizontal" maps to the x-derivative (dx=1) here, while
    /// HorizontalEdgeProcessor uses the y-derivative for horizontal edges —
    /// the two classes disagree on the naming convention; confirm which
    /// mapping is intended before changing behavior.
    /// </remarks>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string direction = GetParameter<string>("Direction");
        int kernelSize = GetParameter<int>("KernelSize");
        double scale = GetParameter<double>("Scale");
        // Sobel requires an odd aperture within [1, 7].
        if (kernelSize % 2 == 0) kernelSize++;
        if (kernelSize > 7) kernelSize = 7;
        if (kernelSize < 1) kernelSize = 1;

        Image<Gray, float> sobelX = new Image<Gray, float>(inputImage.Size);
        Image<Gray, float> sobelY = new Image<Gray, float>(inputImage.Size);
        Image<Gray, byte> result;

        if (direction == "Horizontal" || direction == "Both")
        {
            // X-direction derivative (dx = 1).
            CvInvoke.Sobel(inputImage, sobelX, DepthType.Cv32F, 1, 0, kernelSize);
        }
        if (direction == "Vertical" || direction == "Both")
        {
            // Y-direction derivative (dy = 1).
            CvInvoke.Sobel(inputImage, sobelY, DepthType.Cv32F, 0, 1, kernelSize);
        }

        if (direction == "Both")
        {
            // Gradient magnitude sqrt(Gx^2 + Gy^2), computed per pixel.
            Image<Gray, float> magnitude = new Image<Gray, float>(inputImage.Size);
            for (int y = 0; y < inputImage.Height; y++)
            {
                for (int x = 0; x < inputImage.Width; x++)
                {
                    float gx = sobelX.Data[y, x, 0];
                    float gy = sobelY.Data[y, x, 0];
                    magnitude.Data[y, x, 0] = (float)Math.Sqrt(gx * gx + gy * gy);
                }
            }
            // Scale, then saturate-convert to 8-bit.
            var scaled = magnitude * scale;
            result = scaled.Convert<Gray, byte>();
            magnitude.Dispose();
            scaled.Dispose();
        }
        else if (direction == "Horizontal")
        {
            result = new Image<Gray, byte>(inputImage.Size);
            CvInvoke.ConvertScaleAbs(sobelX, result, scale, 0);
        }
        else // Vertical
        {
            result = new Image<Gray, byte>(inputImage.Size);
            CvInvoke.ConvertScaleAbs(sobelY, result, scale, 0);
        }

        sobelX.Dispose();
        sobelY.Dispose();
        _logger.Debug("Process: Direction = {Direction}, KernelSize = {KernelSize}, Scale = {Scale}",
            direction, kernelSize, scale);
        return result;
    }
}
+43
View File
@@ -0,0 +1,43 @@
using System;
using System.Windows;
using System.Windows.Controls.Primitives;
namespace ImageROIControl
{
/// <summary>
/// ROI控制点 — draggable handle used to manipulate an ROI shape.
/// </summary>
public class ControlThumb : Thumb
{
    // Shared style resolved once from the pack-URI theme dictionary; null when
    // the dictionary or the style cannot be loaded.
    private static readonly Style? thumbStyle;

    static ControlThumb()
    {
        try
        {
            var dictionary = new ResourceDictionary
            {
                Source = new Uri("pack://application:,,,/ImageROIControl;component/Themes/Generic.xaml", UriKind.Absolute)
            };
            thumbStyle = dictionary["AreaControlThumbStyle"] as Style;
        }
        catch
        {
            // Fall back to the hard-coded default look when the theme fails to load.
            thumbStyle = null;
        }
    }

    public ControlThumb()
    {
        if (thumbStyle is not null)
        {
            Style = thumbStyle;
            return;
        }

        // Default appearance: a small 12x12 handle.
        Width = 12;
        Height = 12;
    }
}
}
+270
View File
@@ -0,0 +1,270 @@
using ImageROIControl.Models;
using System.Collections.ObjectModel;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
namespace ImageROIControl.Controls
{
/// <summary>
/// 图像ROI画布控件,支持图像显示、ROI编辑、缩放和平移
/// </summary>
public class ImageROICanvas : Control
{
// Canvas part resolved from the control template ("PART_Canvas"); null until Loaded fires.
private Canvas? roiCanvas;
// True once the current left-button gesture has moved past the drag dead zone.
private bool isDragging = false;
// Position of the last left-button press (or last pan step), in control coordinates.
private Point mouseDownPoint = new Point();
// Multiplicative zoom step per wheel notch, and the allowed zoom range.
private const double ZoomStep = 1.2;
private const double MinZoom = 0.1;
private const double MaxZoom = 10.0;

static ImageROICanvas()
{
    // Point WPF at the default style declared in Themes/Generic.xaml.
    DefaultStyleKeyProperty.OverrideMetadata(typeof(ImageROICanvas),
        new FrameworkPropertyMetadata(typeof(ImageROICanvas)));
}

public ImageROICanvas()
{
    Loaded += OnLoaded;
}
private void OnLoaded(object sender, RoutedEventArgs e)
{
    // Resolve the ROI canvas part from the applied template; stays null if
    // the template does not define "PART_Canvas".
    roiCanvas = GetTemplateChild("PART_Canvas") as Canvas;
}
#region Dependency Properties

// Image displayed beneath the ROI layer. Changing it triggers
// OnImageSourceChanged, which caches the bitmap's pixel dimensions.
public static readonly DependencyProperty ImageSourceProperty =
    DependencyProperty.Register(nameof(ImageSource), typeof(ImageSource), typeof(ImageROICanvas),
        new PropertyMetadata(null, OnImageSourceChanged));

public ImageSource? ImageSource
{
    get => (ImageSource?)GetValue(ImageSourceProperty);
    set => SetValue(ImageSourceProperty, value);
}
/// <summary>
/// Caches the pixel dimensions of a newly assigned bitmap source.
/// </summary>
/// <remarks>
/// Fixed: the previous version also required <c>roiCanvas</c> to be non-null,
/// which silently skipped the size update when ImageSource was assigned
/// before the template was applied (Loaded). The canvas plays no part in
/// these property assignments, so that guard was removed.
/// </remarks>
private static void OnImageSourceChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    var control = (ImageROICanvas)d;
    if (e.NewValue is BitmapSource bitmap)
    {
        control.ImageWidth = bitmap.PixelWidth;
        control.ImageHeight = bitmap.PixelHeight;
    }
}
// Editable ROI shapes rendered on top of the image.
public static readonly DependencyProperty ROIItemsProperty =
    DependencyProperty.Register(nameof(ROIItems), typeof(ObservableCollection<ROIShape>), typeof(ImageROICanvas),
        new PropertyMetadata(null));

public ObservableCollection<ROIShape>? ROIItems
{
    get => (ObservableCollection<ROIShape>?)GetValue(ROIItemsProperty);
    set => SetValue(ROIItemsProperty, value);
}

// Current zoom factor; ZoomIn/ZoomOut keep it within [MinZoom, MaxZoom].
public static readonly DependencyProperty ZoomScaleProperty =
    DependencyProperty.Register(nameof(ZoomScale), typeof(double), typeof(ImageROICanvas),
        new PropertyMetadata(1.0));

public double ZoomScale
{
    get => (double)GetValue(ZoomScaleProperty);
    set => SetValue(ZoomScaleProperty, value);
}

// Anchor point of the most recent zoom gesture (set by ZoomIn/ZoomOut).
public static readonly DependencyProperty ZoomCenterProperty =
    DependencyProperty.Register(nameof(ZoomCenter), typeof(Point), typeof(ImageROICanvas),
        new PropertyMetadata(new Point()));

public Point ZoomCenter
{
    get => (Point)GetValue(ZoomCenterProperty);
    set => SetValue(ZoomCenterProperty, value);
}

// Accumulated horizontal pan offset, updated while dragging with the left button.
public static readonly DependencyProperty PanningOffsetXProperty =
    DependencyProperty.Register(nameof(PanningOffsetX), typeof(double), typeof(ImageROICanvas),
        new PropertyMetadata(0.0));

public double PanningOffsetX
{
    get => (double)GetValue(PanningOffsetXProperty);
    set => SetValue(PanningOffsetXProperty, value);
}

// Accumulated vertical pan offset, updated while dragging with the left button.
public static readonly DependencyProperty PanningOffsetYProperty =
    DependencyProperty.Register(nameof(PanningOffsetY), typeof(double), typeof(ImageROICanvas),
        new PropertyMetadata(0.0));

public double PanningOffsetY
{
    get => (double)GetValue(PanningOffsetYProperty);
    set => SetValue(PanningOffsetYProperty, value);
}

// Pixel width of the displayed image; refreshed when ImageSource changes.
public static readonly DependencyProperty ImageWidthProperty =
    DependencyProperty.Register(nameof(ImageWidth), typeof(double), typeof(ImageROICanvas),
        new PropertyMetadata(800.0));

public double ImageWidth
{
    get => (double)GetValue(ImageWidthProperty);
    set => SetValue(ImageWidthProperty, value);
}

// Pixel height of the displayed image; refreshed when ImageSource changes.
public static readonly DependencyProperty ImageHeightProperty =
    DependencyProperty.Register(nameof(ImageHeight), typeof(double), typeof(ImageROICanvas),
        new PropertyMetadata(600.0));

public double ImageHeight
{
    get => (double)GetValue(ImageHeightProperty);
    set => SetValue(ImageHeightProperty, value);
}

// Currently selected ROI shape, if any.
public static readonly DependencyProperty SelectedROIProperty =
    DependencyProperty.Register(nameof(SelectedROI), typeof(ROIShape), typeof(ImageROICanvas),
        new PropertyMetadata(null));

public ROIShape? SelectedROI
{
    get => (ROIShape?)GetValue(SelectedROIProperty);
    set => SetValue(SelectedROIProperty, value);
}

#endregion Dependency Properties
#region Mouse Events
/// <summary>Wheel up zooms in, wheel down zooms out, anchored at the cursor.</summary>
protected override void OnMouseWheel(MouseWheelEventArgs e)
{
    base.OnMouseWheel(e);

    Point anchor = e.GetPosition(this);
    if (e.Delta > 0)
        ZoomIn(anchor);
    else
        ZoomOut(anchor);

    // The wheel is fully consumed here; don't let ancestors scroll.
    e.Handled = true;
}
/// <summary>Records the press position; a later move decides drag vs. click.</summary>
protected override void OnMouseLeftButtonDown(MouseButtonEventArgs e)
{
    base.OnMouseLeftButtonDown(e);
    isDragging = false;
    mouseDownPoint = e.GetPosition(this);
}
/// <summary>Pans the view while the left button is held and the cursor has moved far enough.</summary>
protected override void OnMouseMove(MouseEventArgs e)
{
    base.OnMouseMove(e);
    if (e.LeftButton != MouseButtonState.Pressed)
        return;

    Point current = e.GetPosition(this);
    Vector travel = current - mouseDownPoint;
    // Threshold shrinks with zoom so the drag feel is constant in image space.
    if (travel.Length > 10 / ZoomScale)
    {
        isDragging = true;
        PanningOffsetX += travel.X;
        PanningOffsetY += travel.Y;
        mouseDownPoint = current;
    }
}
/// <summary>On release: a press that never became a drag is treated as a click.</summary>
protected override void OnMouseLeftButtonUp(MouseButtonEventArgs e)
{
    base.OnMouseLeftButtonUp(e);
    if (!isDragging)
    {
        // Plain click: forward the canvas-space position (e.g. to add a polygon vertex).
        OnCanvasClicked(e.GetPosition(roiCanvas));
    }
    isDragging = false;
}
#endregion Mouse Events
#region Zoom Methods
/// <summary>Multiplies the zoom by ZoomStep around <paramref name="center"/>, capped at MaxZoom.</summary>
public void ZoomIn(Point center)
{
    double target = ZoomScale * ZoomStep;
    if (target > MaxZoom)
        return;

    ZoomCenter = center;
    ZoomScale = target;
}
/// <summary>Divides the zoom by ZoomStep around <paramref name="center"/>, floored at MinZoom.</summary>
public void ZoomOut(Point center)
{
    double target = ZoomScale / ZoomStep;
    if (target < MinZoom)
        return;

    ZoomCenter = center;
    ZoomScale = target;
}
/// <summary>Restores the default view: 100% zoom, no pan, zoom anchor at the origin.</summary>
public void ResetZoom()
{
    PanningOffsetX = 0;
    PanningOffsetY = 0;
    ZoomCenter = new Point();
    ZoomScale = 1.0;
}
#endregion Zoom Methods
#region Events
// Bubbling routed event raised for a non-drag click on the canvas.
public static readonly RoutedEvent CanvasClickedEvent =
    EventManager.RegisterRoutedEvent(nameof(CanvasClicked), RoutingStrategy.Bubble,
        typeof(RoutedEventHandler), typeof(ImageROICanvas));

/// <summary>Raised when the canvas is clicked without dragging.</summary>
public event RoutedEventHandler CanvasClicked
{
    add { AddHandler(CanvasClickedEvent, value); }
    remove { RemoveHandler(CanvasClickedEvent, value); }
}

/// <summary>Raises <see cref="CanvasClicked"/> carrying the click position.</summary>
protected virtual void OnCanvasClicked(Point position)
{
    var args = new CanvasClickedEventArgs(CanvasClickedEvent, position);
    RaiseEvent(args);
}
#endregion Events
}
/// <summary>
/// Event args for canvas click events, carrying the click position.
/// </summary>
public class CanvasClickedEventArgs : RoutedEventArgs
{
    // Position is in the coordinate space of the element the raiser passed to GetPosition.
    public Point Position { get; }

    public CanvasClickedEventArgs(RoutedEvent routedEvent, Point position) : base(routedEvent)
    {
        Position = position;
    }
}
}
@@ -0,0 +1,102 @@
<UserControl x:Class="ImageROIControl.Controls.PolygonRoiCanvas"
             xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
             xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
             xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
             xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
             xmlns:local="clr-namespace:ImageROIControl.Controls"
             xmlns:models="clr-namespace:ImageROIControl.Models"
             xmlns:converters="clr-namespace:ImageROIControl.Converters"
             xmlns:behaviors="clr-namespace:ImageROIControl"
             mc:Ignorable="d"
             d:DesignHeight="450" d:DesignWidth="800"
             x:Name="root"
             Background="White">
    <UserControl.Resources>
        <behaviors:PointListToPointCollectionConverter x:Key="PointListToPointCollectionConverter" />
        <converters:ROITypeToVisibilityConverter x:Key="ROITypeToVisibilityConverter" />
    </UserControl.Resources>
    <Border BorderBrush="Transparent" BorderThickness="1" ClipToBounds="True">
        <Grid>
            <Grid.ColumnDefinitions>
                <ColumnDefinition Width="Auto" />
                <ColumnDefinition Width="*" />
            </Grid.ColumnDefinitions>
            <!-- Left-hand zoom control buttons -->
            <Border Grid.Column="0" Background="White" Padding="5">
                <StackPanel Orientation="Vertical" VerticalAlignment="Top">
                    <Button x:Name="btnZoomIn" Content="+" Background="White" BorderBrush="LightGray" Width="40" Height="40" Margin="2" Click="BtnZoomIn_Click" />
                    <Button x:Name="btnZoomOut" Content="-" Background="White" BorderBrush="LightGray" Width="40" Height="40" Margin="2" Click="BtnZoomOut_Click" />
                    <Button x:Name="btnReset" Content="适应" Background="White" BorderBrush="LightGray" Width="40" Height="40" Margin="2" Click="BtnReset_Click" />
                </StackPanel>
            </Border>
            <!-- Image display area; transformGrid carries the zoom/pan transforms -->
            <Grid Grid.Column="1" x:Name="imageDisplayGrid" ClipToBounds="True">
                <Grid x:Name="transformGrid"
                      RenderTransformOrigin="0,0"
                      HorizontalAlignment="Center"
                      VerticalAlignment="Center">
                    <Grid.RenderTransform>
                        <TransformGroup>
                            <ScaleTransform x:Name="scaleTransform"
                                            ScaleX="{Binding ZoomScale, ElementName=root}"
                                            ScaleY="{Binding ZoomScale, ElementName=root}" />
                            <TranslateTransform x:Name="translateTransform"
                                                X="{Binding PanOffsetX, ElementName=root}"
                                                Y="{Binding PanOffsetY, ElementName=root}" />
                        </TransformGroup>
                    </Grid.RenderTransform>
                    <Canvas x:Name="mainCanvas"
                            Width="{Binding CanvasWidth, ElementName=root}"
                            Height="{Binding CanvasHeight, ElementName=root}"
                            Background="Transparent"
                            MouseWheel="Canvas_MouseWheel"
                            MouseLeftButtonDown="Canvas_MouseLeftButtonDown"
                            MouseLeftButtonUp="Canvas_MouseLeftButtonUp"
                            MouseMove="Canvas_MouseMove"
                            MouseRightButtonDown="Canvas_MouseRightButtonDown">
                        <!-- Background image, stretched to the canvas size -->
                        <Image x:Name="backgroundImage"
                               Source="{Binding ImageSource, ElementName=root}"
                               Width="{Binding CanvasWidth, ElementName=root}"
                               Height="{Binding CanvasHeight, ElementName=root}"
                               Stretch="Fill" />
                        <!-- ROI overlay - polygons only -->
                        <ItemsControl ItemsSource="{Binding ROIItems, ElementName=root}">
                            <ItemsControl.ItemsPanel>
                                <ItemsPanelTemplate>
                                    <Canvas />
                                </ItemsPanelTemplate>
                            </ItemsControl.ItemsPanel>
                            <ItemsControl.ItemTemplate>
                                <DataTemplate>
                                    <!-- Polygon ROI; PolygonPointsBehavior syncs Points from the model -->
                                    <Polygon x:Name="polygonShape"
                                             behaviors:PolygonPointsBehavior.PointsSource="{Binding Points}"
                                             Stroke="{Binding Color}"
                                             StrokeThickness="1"
                                             Fill="Transparent"
                                             MouseLeftButtonDown="ROI_MouseLeftButtonDown" />
                                </DataTemplate>
                            </ItemsControl.ItemTemplate>
                        </ItemsControl>
                    </Canvas>
                </Grid>
                <!-- Current zoom factor readout (bottom-right) -->
                <TextBlock Text="{Binding ZoomScale, ElementName=root, StringFormat=Zoom Scale: {0:P0}}"
                           HorizontalAlignment="Right"
                           VerticalAlignment="Bottom"
                           Margin="10"
                           Padding="5"
                           Background="#AA000000"
                           Foreground="White"
                           FontSize="10" />
            </Grid>
        </Grid>
    </Border>
</UserControl>
@@ -0,0 +1,527 @@
using ImageROIControl.Models;
using System;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Shapes;
namespace ImageROIControl.Controls
{
public partial class PolygonRoiCanvas : UserControl
{
private bool isDragging = false;      // true once a press has moved far enough to count as a pan
private Point lastMousePosition;      // last pointer position, in imageDisplayGrid coordinates
private const double ZoomStep = 1.2;  // multiplicative zoom factor per wheel tick / button press
private Adorner? currentAdorner;      // vertex-editing adorner for the currently selected ROI

/// <summary>Initializes the control and defers event wiring to the Loaded handler.</summary>
public PolygonRoiCanvas()
{
    InitializeComponent();
    Loaded += PolygonRoiCanvas_Loaded;
}
/// <summary>
/// Wires change notifications for the bound ROI collection once the control is loaded.
/// </summary>
/// <remarks>
/// Loaded can fire more than once for the same control (e.g. when it is detached
/// and re-attached to the visual tree). The original code re-subscribed on every
/// Loaded, stacking duplicate handlers; each subscription is now removed before
/// being added so handlers are registered exactly once.
/// </remarks>
private void PolygonRoiCanvas_Loaded(object sender, RoutedEventArgs e)
{
    if (ROIItems == null)
    {
        return;
    }

    ROIItems.CollectionChanged -= ROIItems_CollectionChanged;
    ROIItems.CollectionChanged += ROIItems_CollectionChanged;

    foreach (var roi in ROIItems)
    {
        roi.PropertyChanged -= ROI_PropertyChanged;
        roi.PropertyChanged += ROI_PropertyChanged;

        // Polygon ROIs additionally report vertex add/remove via their Points collection.
        if (roi is PolygonROI polygonROI)
        {
            polygonROI.Points.CollectionChanged -= Points_CollectionChanged;
            polygonROI.Points.CollectionChanged += Points_CollectionChanged;
        }
    }
}
// Keeps per-ROI subscriptions in sync as items are added to / removed from ROIItems.
private void ROIItems_CollectionChanged(object? sender, NotifyCollectionChangedEventArgs e)
{
    if (e.NewItems != null)
    {
        foreach (ROIShape roi in e.NewItems)
        {
            roi.PropertyChanged += ROI_PropertyChanged;
            // Polygon ROIs also report vertex add/remove via their Points collection.
            if (roi is PolygonROI polygonROI)
            {
                polygonROI.Points.CollectionChanged += Points_CollectionChanged;
            }
        }
    }
    if (e.OldItems != null)
    {
        foreach (ROIShape roi in e.OldItems)
        {
            roi.PropertyChanged -= ROI_PropertyChanged;
            // Stop watching Points changes for removed polygons.
            if (roi is PolygonROI polygonROI)
            {
                polygonROI.Points.CollectionChanged -= Points_CollectionChanged;
            }
        }
    }
    // NOTE(review): a Reset action (ObservableCollection.Clear) carries no OldItems,
    // so handlers on cleared ROIs are never detached — confirm whether callers Clear().
}
// Rebuilds the vertex adorner when the selected polygon gains or loses a vertex.
private void Points_CollectionChanged(object? sender, System.Collections.Specialized.NotifyCollectionChangedEventArgs e)
{
    // Only rebuild on Add/Remove; the Replace action fired while dragging a
    // vertex must NOT rebuild the adorner (it only re-arranges itself).
    if (e.Action == System.Collections.Specialized.NotifyCollectionChangedAction.Remove ||
    e.Action == System.Collections.Specialized.NotifyCollectionChangedAction.Add)
    {
        // Only react if the changed collection belongs to the currently selected polygon.
        if (SelectedROI is PolygonROI polygonROI && sender == polygonROI.Points)
        {
            // Defer via the dispatcher so the UI finishes processing the Points change first.
            Dispatcher.BeginInvoke(new Action(() =>
            {
                UpdateAdorner();
            }), System.Windows.Threading.DispatcherPriority.Render);
        }
    }
    // Replace (drag) intentionally falls through without rebuilding.
}
// Refreshes the adorner when selection flips, or when a polygon's entire
// Points collection is replaced (element-level edits arrive via CollectionChanged).
private void ROI_PropertyChanged(object? sender, System.ComponentModel.PropertyChangedEventArgs e)
{
    bool selectionChanged = e.PropertyName == nameof(ROIShape.IsSelected);
    bool pointsReplaced = e.PropertyName == "Points" && sender is PolygonROI;
    if (selectionChanged || pointsReplaced)
    {
        UpdateAdorner();
    }
}
#region Dependency Properties

// Backing store for ImageSource.
public static readonly DependencyProperty ImageSourceProperty =
    DependencyProperty.Register(nameof(ImageSource), typeof(ImageSource), typeof(PolygonRoiCanvas),
        new PropertyMetadata(null, OnImageSourceChanged));

/// <summary>Image shown behind the ROIs; the canvas adopts its size on change.</summary>
public ImageSource? ImageSource
{
    get => (ImageSource?)GetValue(ImageSourceProperty);
    set => SetValue(ImageSourceProperty, value);
}

// Resizes the drawing canvas to match the new image (ImageSource.Width/Height
// are in device-independent units).
private static void OnImageSourceChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    var control = (PolygonRoiCanvas)d;
    if (e.NewValue is ImageSource imageSource)
    {
        control.CanvasWidth = imageSource.Width;
        control.CanvasHeight = imageSource.Height;
    }
}

// Backing store for ROIItems.
public static readonly DependencyProperty ROIItemsProperty =
    DependencyProperty.Register(nameof(ROIItems), typeof(ObservableCollection<ROIShape>), typeof(PolygonRoiCanvas),
        new PropertyMetadata(null));

/// <summary>Polygon ROI shapes rendered over the image; null until bound.</summary>
public ObservableCollection<ROIShape>? ROIItems
{
    get => (ObservableCollection<ROIShape>?)GetValue(ROIItemsProperty);
    set => SetValue(ROIItemsProperty, value);
}
// Backing store for ZoomScale (default 1.0). The [0.1, 10.0] clamp is applied
// through a CoerceValueCallback rather than in the CLR wrapper: WPF bindings,
// animations and styles call SetValue directly and bypass the wrapper, so a
// wrapper-side clamp (as originally written) would silently not apply to them.
public static readonly DependencyProperty ZoomScaleProperty =
    DependencyProperty.Register(nameof(ZoomScale), typeof(double), typeof(PolygonRoiCanvas),
        new PropertyMetadata(1.0, OnZoomScaleChanged, CoerceZoomScale));

/// <summary>Current zoom factor; every write path is coerced into [0.1, 10.0].</summary>
public double ZoomScale
{
    get => (double)GetValue(ZoomScaleProperty);
    set => SetValue(ZoomScaleProperty, value);
}

// Runs for every write path (wrapper, binding, animation) before the change callback.
private static object CoerceZoomScale(DependencyObject d, object baseValue)
{
    double value = (double)baseValue;
    return Math.Max(0.1, Math.Min(10.0, value));
}

private static void OnZoomScaleChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    var control = (PolygonRoiCanvas)d;
    // Rebuild the adorner so control-point size tracks the zoom level.
    control.UpdateAdorner();
}
// Backing store for PanOffsetX (horizontal translation of the canvas).
public static readonly DependencyProperty PanOffsetXProperty =
    DependencyProperty.Register(nameof(PanOffsetX), typeof(double), typeof(PolygonRoiCanvas),
        new PropertyMetadata(0.0, OnPanOffsetChanged));

/// <summary>Horizontal pan offset applied by the TranslateTransform in XAML.</summary>
public double PanOffsetX
{
    get => (double)GetValue(PanOffsetXProperty);
    set => SetValue(PanOffsetXProperty, value);
}

// Backing store for PanOffsetY (vertical translation of the canvas).
public static readonly DependencyProperty PanOffsetYProperty =
    DependencyProperty.Register(nameof(PanOffsetY), typeof(double), typeof(PolygonRoiCanvas),
        new PropertyMetadata(0.0, OnPanOffsetChanged));

/// <summary>Vertical pan offset applied by the TranslateTransform in XAML.</summary>
public double PanOffsetY
{
    get => (double)GetValue(PanOffsetYProperty);
    set => SetValue(PanOffsetYProperty, value);
}

// When panning, rebuild the adorner so its control points stay aligned with the shape.
private static void OnPanOffsetChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    var control = (PolygonRoiCanvas)d;
    if (control.SelectedROI != null && control.SelectedROI.IsSelected)
    {
        control.UpdateAdorner();
    }
}

// Backing store for CanvasWidth (default 800; synced to the image by OnImageSourceChanged).
public static readonly DependencyProperty CanvasWidthProperty =
    DependencyProperty.Register(nameof(CanvasWidth), typeof(double), typeof(PolygonRoiCanvas),
        new PropertyMetadata(800.0));

/// <summary>Width of the drawing surface, matching the displayed image.</summary>
public double CanvasWidth
{
    get => (double)GetValue(CanvasWidthProperty);
    set => SetValue(CanvasWidthProperty, value);
}

// Backing store for CanvasHeight (default 600; synced to the image by OnImageSourceChanged).
public static readonly DependencyProperty CanvasHeightProperty =
    DependencyProperty.Register(nameof(CanvasHeight), typeof(double), typeof(PolygonRoiCanvas),
        new PropertyMetadata(600.0));

/// <summary>Height of the drawing surface, matching the displayed image.</summary>
public double CanvasHeight
{
    get => (double)GetValue(CanvasHeightProperty);
    set => SetValue(CanvasHeightProperty, value);
}

// Backing store for SelectedROI.
public static readonly DependencyProperty SelectedROIProperty =
    DependencyProperty.Register(nameof(SelectedROI), typeof(ROIShape), typeof(PolygonRoiCanvas),
        new PropertyMetadata(null, OnSelectedROIChanged));

/// <summary>The currently selected ROI, or null.</summary>
public ROIShape? SelectedROI
{
    get => (ROIShape?)GetValue(SelectedROIProperty);
    set => SetValue(SelectedROIProperty, value);
}

// Flips the IsSelected flags on the old/new ROI and refreshes the adorner.
private static void OnSelectedROIChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    var control = (PolygonRoiCanvas)d;
    if (e.OldValue is ROIShape oldROI)
    {
        oldROI.IsSelected = false;
    }
    if (e.NewValue is ROIShape newROI)
    {
        newROI.IsSelected = true;
    }
    control.UpdateAdorner();
}
#endregion Dependency Properties
#region Adorner Management
// Tears down the current adorner (if any) and attaches a fresh vertex-editing
// adorner to the selected polygon. Called on selection, zoom, pan and vertex changes.
private void UpdateAdorner()
{
    // Remove the old adorner first so at most one is ever attached.
    if (currentAdorner != null)
    {
        var adornerLayer = AdornerLayer.GetAdornerLayer(mainCanvas);
        if (adornerLayer != null)
        {
            adornerLayer.Remove(currentAdorner);
        }
        currentAdorner = null;
    }
    // Attach an adorner to the selected ROI.
    if (SelectedROI != null && SelectedROI.IsSelected)
    {
        // Locate the Polygon visual generated by the ItemsControl for this ROI.
        var container = FindROIVisual(SelectedROI);
        if (container != null)
        {
            var adornerLayer = AdornerLayer.GetAdornerLayer(mainCanvas);
            if (adornerLayer != null)
            {
                // Inverse of the zoom so the thumbs keep a constant on-screen size.
                double scaleFactor = 1.0 / ZoomScale;
                if (SelectedROI is PolygonROI polygonROI)
                {
                    currentAdorner = new PolygonAdorner(container, scaleFactor, polygonROI);
                }
                if (currentAdorner != null)
                {
                    adornerLayer.Add(currentAdorner);
                }
            }
        }
    }
}
// Maps an ROI model to the Polygon visual the ItemsControl generated for it,
// or null if no matching container exists.
private UIElement? FindROIVisual(ROIShape roi)
{
    // Search the ItemsControl hosted inside mainCanvas.
    var itemsControl = FindVisualChild<ItemsControl>(mainCanvas);
    if (itemsControl != null)
    {
        for (int i = 0; i < itemsControl.Items.Count; i++)
        {
            if (itemsControl.Items[i] == roi)
            {
                // Try to fetch the item container.
                var container = itemsControl.ItemContainerGenerator.ContainerFromIndex(i) as ContentPresenter;
                // Container may not be generated yet right after an add.
                if (container == null)
                {
                    // Force layout so the generator produces the container.
                    itemsControl.UpdateLayout();
                    container = itemsControl.ItemContainerGenerator.ContainerFromIndex(i) as ContentPresenter;
                }
                if (container != null)
                {
                    // Dig out the actual shape element (polygons only in this control).
                    if (roi is PolygonROI)
                    {
                        return FindVisualChild<Polygon>(container);
                    }
                }
            }
        }
    }
    return null;
}
// Depth-first search of the visual tree for the first descendant of type T.
private T? FindVisualChild<T>(DependencyObject parent) where T : DependencyObject
{
    int childCount = VisualTreeHelper.GetChildrenCount(parent);
    for (int index = 0; index < childCount; index++)
    {
        var node = VisualTreeHelper.GetChild(parent, index);
        if (node is T match)
        {
            return match;
        }

        var nested = FindVisualChild<T>(node);
        if (nested is not null)
        {
            return nested;
        }
    }
    return null;
}
#endregion Adorner Management
#region Mouse Events
// Wheel zoom anchored at the cursor: the point under the mouse stays fixed on screen.
private void Canvas_MouseWheel(object sender, MouseWheelEventArgs e)
{
    // Mouse position in the (untransformed) viewport.
    Point mousePos = e.GetPosition(imageDisplayGrid);
    // Mouse position on the canvas before the zoom changes (currently unused below).
    Point mousePosOnCanvas = e.GetPosition(mainCanvas);
    double oldZoom = ZoomScale;
    double newZoom = oldZoom;
    if (e.Delta > 0)
    {
        newZoom = oldZoom * ZoomStep;
    }
    else
    {
        newZoom = oldZoom / ZoomStep;
    }
    // Clamp the zoom range to [0.1, 10].
    newZoom = Math.Max(0.1, Math.Min(10.0, newZoom));
    if (Math.Abs(newZoom - oldZoom) > 0.001)
    {
        // Relative change in scale.
        double scale = newZoom / oldZoom;
        // Apply the zoom.
        ZoomScale = newZoom;
        // Adjust the pan so the pixel under the cursor does not move:
        // newOffset = mouse - (mouse - oldOffset) * scale
        PanOffsetX = mousePos.X - (mousePos.X - PanOffsetX) * scale;
        PanOffsetY = mousePos.Y - (mousePos.Y - PanOffsetY) * scale;
    }
    e.Handled = true;
}
// Begins a potential pan; capture keeps move events flowing outside the canvas bounds.
private void Canvas_MouseLeftButtonDown(object sender, MouseButtonEventArgs e)
{
    isDragging = false;
    lastMousePosition = e.GetPosition(imageDisplayGrid);
    mainCanvas.CaptureMouse();
}
// Pans the view while the captured left button is held.
private void Canvas_MouseMove(object sender, MouseEventArgs e)
{
    if (e.LeftButton != MouseButtonState.Pressed || !mainCanvas.IsMouseCaptured)
        return;

    Point current = e.GetPosition(imageDisplayGrid);
    Vector delta = current - lastMousePosition;
    // Ignore sub-5px jitter so a steady press still counts as a click.
    if (delta.Length > 5)
    {
        isDragging = true;
        PanOffsetX += delta.X;
        PanOffsetY += delta.Y;
        lastMousePosition = current;
    }
}
// Ends a pan; a press that never became a drag is forwarded as a click.
private void Canvas_MouseLeftButtonUp(object sender, MouseButtonEventArgs e)
{
    mainCanvas.ReleaseMouseCapture();
    if (!isDragging)
    {
        // No pan happened → treat as a click (used e.g. to add polygon vertices).
        OnCanvasClicked(e.GetPosition(mainCanvas));
    }
    isDragging = false;
}
// Right click signals "finish the polygon currently being drawn".
private void Canvas_MouseRightButtonDown(object sender, MouseButtonEventArgs e)
{
    e.Handled = true;
    OnRightClick();
}
// Clicking a shape selects its ROI and stops the event from starting a pan.
private void ROI_MouseLeftButtonDown(object sender, MouseButtonEventArgs e)
{
    if ((sender as FrameworkElement)?.DataContext is ROIShape roi)
    {
        SelectedROI = roi;
        e.Handled = true;
    }
}
#endregion Mouse Events
#region Public Methods
/// <summary>
/// Fits the image to the display area (similar to WinForms PictureBox SizeMode.Zoom):
/// resets pan, then picks the largest zoom at which the whole image is visible.
/// </summary>
public void ResetView()
{
    ZoomScale = 1.0;
    PanOffsetX = 0;
    PanOffsetY = 0;
    if (imageDisplayGrid != null && CanvasWidth > 0 && CanvasHeight > 0)
    {
        // Defer via the dispatcher so ActualWidth/ActualHeight reflect a completed layout.
        Dispatcher.BeginInvoke(new Action(() =>
        {
            // Actual size of the image display viewport.
            double viewportWidth = imageDisplayGrid.ActualWidth;
            double viewportHeight = imageDisplayGrid.ActualHeight;
            if (viewportWidth > 0 && viewportHeight > 0)
            {
                // Scale factors that would exactly fit width and height.
                double scaleX = viewportWidth / CanvasWidth;
                double scaleY = viewportHeight / CanvasHeight;
                // The smaller factor keeps the full image visible (preserves aspect ratio).
                ZoomScale = Math.Min(scaleX, scaleY);
                // Centering is handled by the Grid's Horizontal/VerticalAlignment.
                PanOffsetX = 0;
                PanOffsetY = 0;
            }
        }), System.Windows.Threading.DispatcherPriority.Loaded);
    }
}
// Zoom-in button: advance by the shared ZoomStep constant (the original used a
// duplicated magic 1.2, which could silently drift from wheel-zoom behavior).
private void BtnZoomIn_Click(object sender, RoutedEventArgs e)
{
    double newZoom = ZoomScale * ZoomStep;
    // Same 10.0 upper bound the wheel handler enforces.
    if (newZoom <= 10.0)
    {
        ZoomScale = newZoom;
    }
}
// Zoom-out button: retreat by the shared ZoomStep constant (replaces the magic 1.2).
private void BtnZoomOut_Click(object sender, RoutedEventArgs e)
{
    double newZoom = ZoomScale / ZoomStep;
    // Same 0.1 lower bound the wheel handler enforces.
    if (newZoom >= 0.1)
    {
        ZoomScale = newZoom;
    }
}
// "Fit" button: restore the fit-to-window view.
private void BtnReset_Click(object sender, RoutedEventArgs e) => ResetView();
#endregion Public Methods
#region Events
// Bubbling routed event raised for a non-drag click on the canvas.
public static readonly RoutedEvent CanvasClickedEvent =
    EventManager.RegisterRoutedEvent(nameof(CanvasClicked), RoutingStrategy.Bubble,
        typeof(RoutedEventHandler), typeof(PolygonRoiCanvas));

/// <summary>Raised when the canvas is clicked without dragging.</summary>
public event RoutedEventHandler CanvasClicked
{
    add { AddHandler(CanvasClickedEvent, value); }
    remove { RemoveHandler(CanvasClickedEvent, value); }
}

/// <summary>Raises <see cref="CanvasClicked"/> with the canvas-space click position.</summary>
protected virtual void OnCanvasClicked(Point position)
{
    var args = new CanvasClickedEventArgs(CanvasClickedEvent, position);
    RaiseEvent(args);
}

// Bubbling routed event raised on right click (used to finish polygon drawing).
public static readonly RoutedEvent RightClickEvent =
    EventManager.RegisterRoutedEvent(nameof(RightClick), RoutingStrategy.Bubble,
        typeof(RoutedEventHandler), typeof(PolygonRoiCanvas));

/// <summary>Raised when the canvas is right-clicked.</summary>
public event RoutedEventHandler RightClick
{
    add { AddHandler(RightClickEvent, value); }
    remove { RemoveHandler(RightClickEvent, value); }
}

/// <summary>Raises <see cref="RightClick"/>.</summary>
protected virtual void OnRightClick()
{
    RaiseEvent(new RoutedEventArgs(RightClickEvent));
}
#endregion Events
}
}
+20
View File
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net8.0-windows</TargetFramework>
    <UseWPF>true</UseWPF>
    <Nullable>enable</Nullable>
    <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
    <PackageId>ImageROIControl</PackageId>
    <Version>1.0.0</Version>
    <!-- NOTE(review): placeholder author/company metadata - fill in before publishing. -->
    <Authors>Your Name</Authors>
    <Company>Your Company</Company>
    <!-- NOTE(review): description and tags advertise rectangle/ellipse ROIs, but the
         model's ROIType enum only defines Polygon - confirm and align before release. -->
    <Description>WPF图像ROI编辑控件库,支持矩形、椭圆、多边形ROI的创建、编辑、缩放和平移功能</Description>
    <PackageTags>WPF;ROI;Image;Polygon;Rectangle;Ellipse;Zoom;Pan</PackageTags>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.Xaml.Behaviors.Wpf" Version="1.1.39" />
  </ItemGroup>
</Project>
+84
View File
@@ -0,0 +1,84 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Runtime.CompilerServices;
using System.Windows;
namespace ImageROIControl.Models
{
/// <summary>
/// ROI shape kind. Only polygons are defined in this library.
/// </summary>
public enum ROIType
{
    Polygon
}
/// <summary>
/// Base class for all ROI shapes. Raises change notifications so WPF
/// bindings pick up edits to identity, selection state and color.
/// </summary>
public abstract class ROIShape : INotifyPropertyChanged
{
    private string _id = Guid.NewGuid().ToString();
    private bool _isSelected;
    private string _color = "Red";

    /// <summary>Unique identifier; defaults to a fresh GUID string.</summary>
    public string Id
    {
        get => _id;
        set
        {
            _id = value;
            OnPropertyChanged();
        }
    }

    /// <summary>Whether this ROI is the current selection.</summary>
    public bool IsSelected
    {
        get => _isSelected;
        set
        {
            _isSelected = value;
            OnPropertyChanged();
        }
    }

    /// <summary>Stroke color name used when rendering the shape.</summary>
    public string Color
    {
        get => _color;
        set
        {
            _color = value;
            OnPropertyChanged();
        }
    }

    /// <summary>Concrete shape kind, supplied by derived classes.</summary>
    public abstract ROIType ROIType { get; }

    public event PropertyChangedEventHandler? PropertyChanged;

    /// <summary>
    /// Raises <see cref="PropertyChanged"/>. Public (not protected) so external
    /// editors such as adorners can force a refresh of a property.
    /// </summary>
    public void OnPropertyChanged([CallerMemberName] string? propertyName = null)
        => PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
}
/// <summary>
/// Polygon ROI defined by an ordered list of vertices.
/// </summary>
public class PolygonROI : ROIShape
{
    // Observable so vertex add/remove/replace is visible to the canvas and adorner.
    private ObservableCollection<Point> _points = new ObservableCollection<Point>();

    public override ROIType ROIType => ROIType.Polygon;

    /// <summary>Vertex collection used for UI binding and interactive editing.</summary>
    public ObservableCollection<Point> Points
    {
        get => _points;
        set { _points = value; OnPropertyChanged(); }
    }

    /// <summary>
    /// Snapshot of the vertices for JSON serialization ("PointsList" in the payload;
    /// not used for UI binding).
    /// NOTE(review): Points itself carries no [JsonIgnore], so a plain
    /// System.Text.Json round-trip would emit the vertices under both "Points"
    /// and "PointsList" - confirm whether that duplication is intended.
    /// </summary>
    [System.Text.Json.Serialization.JsonPropertyName("PointsList")]
    public List<Point> PointsList
    {
        get => new List<Point>(_points);
        set
        {
            _points = new ObservableCollection<Point>(value ?? new List<Point>());
            // Re-raise as Points so bindings observe the replaced collection.
            OnPropertyChanged(nameof(Points));
        }
    }
}
}
+68
View File
@@ -0,0 +1,68 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Windows;
using System.Windows.Data;
using System.Windows.Media;
namespace ImageROIControl
{
/// <summary>
/// Converts a list of points into a WPF PointCollection for Polygon binding,
/// accepting List, ObservableCollection or any IEnumerable of Point.
/// </summary>
public class PointListToPointCollectionConverter : IValueConverter
{
    public object? Convert(object value, Type targetType, object parameter, CultureInfo culture)
        => value switch
        {
            List<Point> list => new PointCollection(list),
            ObservableCollection<Point> observable => new PointCollection(observable),
            IEnumerable<Point> sequence => new PointCollection(sequence),
            // Anything else (including null) yields an empty collection.
            _ => new PointCollection(),
        };

    public object ConvertBack(object value, Type targetType, object parameter, CultureInfo culture)
    {
        var result = new ObservableCollection<Point>();
        if (value is PointCollection pointCollection)
        {
            foreach (Point p in pointCollection)
            {
                result.Add(p);
            }
        }
        return result;
    }
}
/// <summary>
/// Converts a zero-based index into a one-based position label ("0" for non-int input).
/// </summary>
public class IndexToPositionConverter : IValueConverter
{
    public object Convert(object value, Type targetType, object parameter, CultureInfo culture)
        => value is int index ? (index + 1).ToString() : "0";

    // One-way converter: positions are never mapped back to indices.
    public object ConvertBack(object value, Type targetType, object parameter, CultureInfo culture)
        => throw new NotImplementedException();
}
}
+142
View File
@@ -0,0 +1,142 @@
using System.Collections.Generic;
using System.Windows;
using System.Windows.Controls.Primitives;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Shapes;
namespace ImageROIControl
{
/// <summary>
/// Adorner for editing polygon vertices: one draggable thumb per vertex,
/// right-click on a thumb deletes that vertex.
/// </summary>
public class PolygonAdorner : Adorner
{
    private List<ControlThumb> vertexThumbs = new List<ControlThumb>(); // one thumb per polygon vertex
    private VisualCollection visualChildren;
    // Inverse of the canvas zoom, so thumbs keep a constant on-screen size.
    private double scaleFactor = 1;
    // The model whose Points are edited; falls back to the adorned Polygon when null.
    private Models.PolygonROI? polygonROI;

    public PolygonAdorner(UIElement adornedElement, double scaleFactor = 1, Models.PolygonROI? roiModel = null)
        : base(adornedElement)
    {
        visualChildren = new VisualCollection(this);
        this.scaleFactor = scaleFactor;
        this.polygonROI = roiModel;
        // Use the ROI model's point count rather than the Polygon's Points.
        int pointCount = polygonROI?.Points.Count ?? 0;
        // Create one control thumb per vertex; Tag stores the vertex index.
        for (int i = 0; i < pointCount; i++)
        {
            var thumb = new ControlThumb();
            thumb.DragDelta += HandleDrag;
            thumb.DragCompleted += HandleDragCompleted;
            thumb.MouseRightButtonDown += HandleRightClick;
            thumb.Tag = i;
            thumb.Cursor = Cursors.Hand;
            vertexThumbs.Add(thumb);
            visualChildren.Add(thumb);
        }
        // Edge-midpoint thumbs are intentionally not created; vertex insertion is
        // handled elsewhere by clicking the canvas near an edge.
    }

    // Moves the dragged vertex by the drag delta, writing through to the model.
    // NOTE(review): the delta is applied 1:1 to model coordinates - confirm it
    // does not need dividing by the zoom when the adorner layer sits outside
    // the zoom transform.
    private void HandleDrag(object sender, DragDeltaEventArgs args)
    {
        Thumb? hitThumb = sender as Thumb;
        if (hitThumb == null || polygonROI == null) return;
        int index = (int)hitThumb.Tag;
        // Write directly into the ROI model's Points.
        if (index < polygonROI.Points.Count)
        {
            Point currentPoint = polygonROI.Points[index];
            Point newPoint = new Point(
                currentPoint.X + args.HorizontalChange,
                currentPoint.Y + args.VerticalChange
            );
            // Indexer assignment raises a Replace CollectionChanged (no adorner rebuild).
            polygonROI.Points[index] = newPoint;
        }
        // Re-run ArrangeOverride so the thumbs track the moved vertex.
        InvalidateArrange();
    }

    // After the drag ends, notify listeners that the Points property changed as a whole.
    private void HandleDragCompleted(object sender, DragCompletedEventArgs args)
    {
        if (polygonROI != null)
        {
            polygonROI.OnPropertyChanged(nameof(polygonROI.Points));
        }
    }

    // Right-click on a thumb deletes that vertex, keeping at least 3 vertices.
    private void HandleRightClick(object sender, MouseButtonEventArgs e)
    {
        if (polygonROI != null && polygonROI.Points.Count > 3)
        {
            Thumb? hitThumb = sender as Thumb;
            if (hitThumb != null)
            {
                int index = (int)hitThumb.Tag;
                // RemoveAt raises CollectionChanged; PolygonRoiCanvas listens and
                // rebuilds this adorner with fresh thumb indices.
                polygonROI.Points.RemoveAt(index);
                e.Handled = true;
            }
        }
    }

    // Positions each thumb centered on its vertex; thumb size scales inversely with zoom.
    protected override Size ArrangeOverride(Size finalSize)
    {
        // Prefer the ROI model's Points over the Polygon's.
        if (polygonROI != null)
        {
            double thumbSize = 12 * scaleFactor;
            // Guard both counts: a vertex may have been removed since construction.
            for (int i = 0; i < vertexThumbs.Count && i < polygonROI.Points.Count; i++)
            {
                vertexThumbs[i].Arrange(new Rect(
                    polygonROI.Points[i].X - (thumbSize / 2),
                    polygonROI.Points[i].Y - (thumbSize / 2),
                    thumbSize,
                    thumbSize));
            }
        }
        else
        {
            // Fallback: lay out against the adorned Polygon's own Points.
            Polygon poly = (Polygon)AdornedElement;
            double thumbSize = 12 * scaleFactor;
            for (int i = 0; i < vertexThumbs.Count && i < poly.Points.Count; i++)
            {
                vertexThumbs[i].Arrange(new Rect(
                    poly.Points[i].X - (thumbSize / 2),
                    poly.Points[i].Y - (thumbSize / 2),
                    thumbSize,
                    thumbSize));
            }
        }
        return finalSize;
    }

    // Expose the thumbs to the visual tree.
    protected override int VisualChildrenCount
    { get { return visualChildren.Count; } }

    protected override Visual GetVisualChild(int index)
    { return visualChildren[index]; }
}
}
+94
View File
@@ -0,0 +1,94 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Windows;
using System.Windows.Media;
using System.Windows.Shapes;
namespace ImageROIControl
{
/// <summary>
/// Attached behavior that keeps a Polygon's Points synchronized with a bound
/// ObservableCollection&lt;Point&gt;, including element-level changes.
/// </summary>
/// <remarks>
/// Bug fixed: the original unsubscribed the old collection with
/// <c>oldCollection.CollectionChanged -= GetCollectionChangedHandler(polygon)</c>,
/// which builds a brand-new delegate and therefore never removes the handler that
/// was actually subscribed (a leak and a source of stale updates when the source
/// collection is swapped). The exact handler instance is now tracked per Polygon
/// and removed on swap and on Unloaded.
/// </remarks>
public static class PolygonPointsBehavior
{
    // Per-Polygon record of the collection currently attached and the exact
    // handler instance subscribed to it, so the subscription can be undone.
    private static readonly Dictionary<Polygon, (ObservableCollection<Point> Collection, NotifyCollectionChangedEventHandler Handler)> _subscriptions
        = new Dictionary<Polygon, (ObservableCollection<Point>, NotifyCollectionChangedEventHandler)>();

    public static ObservableCollection<Point> GetPointsSource(DependencyObject obj)
    {
        return (ObservableCollection<Point>)obj.GetValue(PointsSourceProperty);
    }

    public static void SetPointsSource(DependencyObject obj, ObservableCollection<Point> value)
    {
        obj.SetValue(PointsSourceProperty, value);
    }

    public static readonly DependencyProperty PointsSourceProperty =
        DependencyProperty.RegisterAttached(
            "PointsSource",
            typeof(ObservableCollection<Point>),
            typeof(PolygonPointsBehavior),
            new PropertyMetadata(null, OnPointsSourceChanged));

    private static void OnPointsSourceChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
    {
        if (!(d is Polygon polygon))
            return;

        // Remove the subscription made for the previous collection, if any.
        Detach(polygon);

        if (e.NewValue is ObservableCollection<Point> newCollection)
        {
            // Push the initial points to the Polygon.
            UpdatePolygonPoints(polygon, newCollection);

            // Subscribe and remember THIS handler instance so it can be removed later.
            NotifyCollectionChangedEventHandler handler = (s, args) => UpdatePolygonPoints(polygon, newCollection);
            newCollection.CollectionChanged += handler;
            _subscriptions[polygon] = (newCollection, handler);

            // Clean up when the Polygon leaves the tree. Using a named handler keeps
            // repeated attach/detach cycles from stacking anonymous lambdas.
            polygon.Unloaded -= Polygon_Unloaded;
            polygon.Unloaded += Polygon_Unloaded;
        }
    }

    // Unsubscribes the handler recorded for this Polygon, if one exists.
    private static void Detach(Polygon polygon)
    {
        if (_subscriptions.TryGetValue(polygon, out var subscription))
        {
            subscription.Collection.CollectionChanged -= subscription.Handler;
            _subscriptions.Remove(polygon);
        }
    }

    private static void Polygon_Unloaded(object sender, RoutedEventArgs e)
    {
        if (sender is Polygon polygon)
        {
            polygon.Unloaded -= Polygon_Unloaded;
            Detach(polygon);
        }
    }

    // Rebuilds the Polygon's PointCollection on the UI thread so WPF notices the change.
    private static void UpdatePolygonPoints(Polygon polygon, ObservableCollection<Point> points)
    {
        polygon.Dispatcher.BeginInvoke(new Action(() =>
        {
            // A fresh PointCollection is required to trigger a render update.
            polygon.Points = new PointCollection(points);
        }), System.Windows.Threading.DispatcherPriority.Render);
    }
}
}
+133
View File
@@ -0,0 +1,133 @@
using ImageROIControl.Models;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Windows;
namespace ImageROIControl
{
/// <summary>
/// JSON (de)serialization helpers for ROI shapes, using converters for
/// WPF Point values and polymorphic ROIShape instances.
/// </summary>
public static class ROISerializer
{
    // Shared options: indented output plus the Point and polymorphic ROI converters.
    private static readonly JsonSerializerOptions Options = new JsonSerializerOptions
    {
        WriteIndented = true,
        Converters = { new PointConverter(), new ROIShapeConverter() }
    };

    /// <summary>Serializes <paramref name="roiList"/> and writes the JSON to a file.</summary>
    public static void SaveToFile(IEnumerable<ROIShape> roiList, string filePath)
        => File.WriteAllText(filePath, Serialize(roiList));

    /// <summary>Reads a file and deserializes its ROI list.</summary>
    public static List<ROIShape> LoadFromFile(string filePath)
        => Deserialize(File.ReadAllText(filePath));

    /// <summary>Serializes an ROI list to an indented JSON string.</summary>
    public static string Serialize(IEnumerable<ROIShape> roiList)
        => JsonSerializer.Serialize(roiList, Options);

    /// <summary>Deserializes an ROI list; a JSON "null" payload yields an empty list.</summary>
    public static List<ROIShape> Deserialize(string json)
        => JsonSerializer.Deserialize<List<ROIShape>>(json, Options) ?? new List<ROIShape>();
}
/// <summary>
/// JSON converter for WPF Point, reading/writing an object of the form {"X": ..., "Y": ...}.
/// </summary>
public class PointConverter : JsonConverter<Point>
{
    public override Point Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType != JsonTokenType.StartObject)
            throw new JsonException();
        // Missing X/Y members default to 0.
        double x = 0, y = 0;
        while (reader.Read())
        {
            if (reader.TokenType == JsonTokenType.EndObject)
                return new Point(x, y);
            if (reader.TokenType == JsonTokenType.PropertyName)
            {
                string? propertyName = reader.GetString();
                reader.Read(); // advance to the property value
                switch (propertyName)
                {
                    case "X":
                        x = reader.GetDouble();
                        break;
                    case "Y":
                        y = reader.GetDouble();
                        break;
                }
            }
        }
        // Input ended before the object was closed.
        throw new JsonException();
    }

    public override void Write(Utf8JsonWriter writer, Point value, JsonSerializerOptions options)
    {
        writer.WriteStartObject();
        writer.WriteNumber("X", value.X);
        writer.WriteNumber("Y", value.Y);
        writer.WriteEndObject();
    }
}
/// <summary>
/// Polymorphic JSON converter for ROIShape: dispatches on the "ROIType"
/// discriminator to deserialize the concrete type.
/// </summary>
public class ROIShapeConverter : JsonConverter<ROIShape>
{
    public override ROIShape? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        using (JsonDocument doc = JsonDocument.ParseValue(ref reader))
        {
            var root = doc.RootElement;
            if (!root.TryGetProperty("ROIType", out var typeElement))
                throw new JsonException("Missing ROIType property");
            // ROIType is expected as a JSON number (System.Text.Json's default enum encoding).
            var roiType = (ROIType)typeElement.GetInt32();
            return roiType switch
            {
                ROIType.Polygon => JsonSerializer.Deserialize<PolygonROI>(root.GetRawText(), options),
                _ => throw new JsonException($"Unknown ROIType: {roiType}")
            };
        }
    }

    public override void Write(Utf8JsonWriter writer, ROIShape value, JsonSerializerOptions options)
    {
        // Serialize as the runtime type; this converter only matches the declared
        // ROIShape type, so serializing the concrete type does not recurse into it.
        JsonSerializer.Serialize(writer, value, value.GetType(), options);
    }
}
}
@@ -0,0 +1,26 @@
using ImageROIControl.Models;
using System;
using System.Globalization;
using System.Windows;
using System.Windows.Data;
namespace ImageROIControl.Converters
{
/// <summary>
/// Shows an element only when the bound ROIType's name equals the converter
/// parameter; everything else collapses.
/// </summary>
public class ROITypeToVisibilityConverter : IValueConverter
{
    public object Convert(object value, Type targetType, object parameter, CultureInfo culture)
    {
        if (value is ROIType roiType && parameter is string expectedName && roiType.ToString() == expectedName)
        {
            return Visibility.Visible;
        }
        return Visibility.Collapsed;
    }

    // One-way converter: visibility is never mapped back to an ROIType.
    public object ConvertBack(object value, Type targetType, object parameter, CultureInfo culture)
        => throw new NotImplementedException();
}
}
+108
View File
@@ -0,0 +1,108 @@
<ResourceDictionary
    xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
    xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
    xmlns:local="clr-namespace:ImageROIControl.Controls"
    xmlns:models="clr-namespace:ImageROIControl.Models">
    <!-- ControlThumb style: 14x14 gray rectangle used as a resize/drag handle -->
    <Style x:Key="AreaControlThumbStyle" TargetType="{x:Type Thumb}">
        <Setter Property="Width" Value="14" />
        <Setter Property="Height" Value="14" />
        <Setter Property="Cursor" Value="Hand" />
        <Setter Property="Template">
            <Setter.Value>
                <ControlTemplate TargetType="{x:Type Thumb}">
                    <Grid>
                        <!-- Gray rectangle body of the thumb -->
                        <Rectangle x:Name="ThumbRect"
                                   Fill="#FF808080"
                                   Stroke="White"
                                   StrokeThickness="1"
                                   Width="14"
                                   Height="14" />
                    </Grid>
                    <ControlTemplate.Triggers>
                        <!-- Mouse-over feedback: lighter fill, thicker border -->
                        <Trigger Property="IsMouseOver" Value="True">
                            <Setter TargetName="ThumbRect" Property="Fill" Value="#FF999999" />
                            <Setter TargetName="ThumbRect" Property="StrokeThickness" Value="2" />
                        </Trigger>
                        <!-- Drag feedback: lightest fill, move cursor -->
                        <Trigger Property="IsDragging" Value="True">
                            <Setter TargetName="ThumbRect" Property="Fill" Value="#FFAAAAAA" />
                            <Setter TargetName="ThumbRect" Property="StrokeThickness" Value="2" />
                            <Setter Property="Cursor" Value="SizeAll" />
                        </Trigger>
                    </ControlTemplate.Triggers>
                </ControlTemplate>
            </Setter.Value>
        </Setter>
    </Style>
    <!-- ImageROICanvas control template: zoom/pan transform over an image plus a polygon-ROI overlay -->
    <Style TargetType="{x:Type local:ImageROICanvas}">
        <Setter Property="Background" Value="LightGray" />
        <Setter Property="ClipToBounds" Value="True" />
        <Setter Property="Template">
            <Setter.Value>
                <ControlTemplate TargetType="{x:Type local:ImageROICanvas}">
                    <Border Background="{TemplateBinding Background}"
                            BorderBrush="{TemplateBinding BorderBrush}"
                            BorderThickness="{TemplateBinding BorderThickness}"
                            ClipToBounds="True">
                        <Grid>
                            <ScrollViewer HorizontalScrollBarVisibility="Auto"
                                          VerticalScrollBarVisibility="Auto"
                                          x:Name="PART_ScrollViewer">
                                <Grid>
                                    <Grid.RenderTransform>
                                        <TransformGroup>
                                            <ScaleTransform ScaleX="{TemplateBinding ZoomScale}"
                                                            ScaleY="{TemplateBinding ZoomScale}"
                                                            CenterX="{Binding ZoomCenter.X, RelativeSource={RelativeSource TemplatedParent}}"
                                                            CenterY="{Binding ZoomCenter.Y, RelativeSource={RelativeSource TemplatedParent}}" />
                                            <TranslateTransform X="{TemplateBinding PanningOffsetX}"
                                                                Y="{TemplateBinding PanningOffsetY}" />
                                        </TransformGroup>
                                    </Grid.RenderTransform>
                                    <AdornerDecorator x:Name="PART_Adorner">
                                        <Canvas x:Name="PART_Canvas"
                                                Width="{TemplateBinding ImageWidth}"
                                                Height="{TemplateBinding ImageHeight}"
                                                Background="White">
                                            <!-- Image display layer -->
                                            <Image Source="{TemplateBinding ImageSource}"
                                                   Width="{TemplateBinding ImageWidth}"
                                                   Height="{TemplateBinding ImageHeight}"
                                                   Stretch="Fill" />
                                            <!-- ROI items container: only polygon ROIs are supported -->
                                            <ItemsControl ItemsSource="{TemplateBinding ROIItems}">
                                                <ItemsControl.ItemsPanel>
                                                    <ItemsPanelTemplate>
                                                        <Canvas />
                                                    </ItemsPanelTemplate>
                                                </ItemsControl.ItemsPanel>
                                                <ItemsControl.ItemTemplate>
                                                    <DataTemplate>
                                                        <!-- Polygon ROI outline -->
                                                        <Polygon Points="{Binding Points}"
                                                                 Stroke="{Binding Color}"
                                                                 StrokeThickness="1"
                                                                 Fill="Transparent" />
                                                    </DataTemplate>
                                                </ItemsControl.ItemTemplate>
                                            </ItemsControl>
                                        </Canvas>
                                    </AdornerDecorator>
                                </Grid>
                            </ScrollViewer>
                        </Grid>
                    </Border>
                </ControlTemplate>
            </Setter.Value>
        </Setter>
    </Style>
</ResourceDictionary>
+517
View File
@@ -0,0 +1,517 @@
using Basler.Pylon;
using Serilog;
using CameraImageGrabbedEventArgs = XP.Camera.ImageGrabbedEventArgs;
using CameraGrabErrorEventArgs = XP.Camera.GrabErrorEventArgs;
namespace XP.Camera;
/// <summary>
/// Basler camera controller: wraps the Basler pylon .NET SDK behind <see cref="ICameraController"/>.
/// </summary>
/// <remarks>
/// <para>All public methods synchronize on the internal <c>_syncLock</c> object and are thread-safe.</para>
/// <para>Event callbacks (ImageGrabbed, GrabError) fire on the StreamGrabber callback thread without
/// holding <c>_syncLock</c>, avoiding deadlocks.</para>
/// <para>The ConnectionLost event fires on the pylon SDK event thread.</para>
/// </remarks>
public class BaslerCameraController : ICameraController
{
    private static readonly ILogger _logger = Log.ForContext<BaslerCameraController>();

    private readonly object _syncLock = new();
    private Basler.Pylon.Camera? _camera;
    private CameraInfo? _cachedCameraInfo;
    private bool _isConnected;
    private bool _isGrabbing;

    /// <summary>
    /// Initializes a new <see cref="BaslerCameraController"/> instance.
    /// </summary>
    public BaslerCameraController()
    {
    }

    /// <inheritdoc />
    public bool IsConnected
    {
        get { lock (_syncLock) { return _isConnected; } }
    }

    /// <inheritdoc />
    public bool IsGrabbing
    {
        get { lock (_syncLock) { return _isGrabbing; } }
    }

    /// <inheritdoc />
    public event EventHandler<CameraImageGrabbedEventArgs>? ImageGrabbed;

    /// <inheritdoc />
    public event EventHandler<CameraGrabErrorEventArgs>? GrabError;

    /// <inheritdoc />
    public event EventHandler? ConnectionLost;

    /// <inheritdoc />
    public CameraInfo Open()
    {
        lock (_syncLock)
        {
            // Idempotent: a second Open() returns the cached device info.
            if (_isConnected && _cachedCameraInfo != null)
            {
                _logger.Information("Camera already connected, returning cached info.");
                return _cachedCameraInfo;
            }
            try
            {
                _logger.Information("Opening camera connection...");
                _camera = new Basler.Pylon.Camera(CameraSelectionStrategy.FirstFound);
                // Configure the device for software triggering as soon as it opens.
                _camera.CameraOpened += (sender, e) => Configuration.SoftwareTrigger(sender!, e);
                _camera.ConnectionLost += OnConnectionLost;
                _camera.Open();
                _cachedCameraInfo = new CameraInfo(
                    ModelName: _camera.CameraInfo![CameraInfoKey.ModelName] ?? "",
                    SerialNumber: _camera.CameraInfo[CameraInfoKey.SerialNumber] ?? "",
                    VendorName: _camera.CameraInfo[CameraInfoKey.VendorName] ?? "",
                    DeviceType: _camera.CameraInfo[CameraInfoKey.DeviceType] ?? ""
                );
                _isConnected = true;
                _logger.Information("Camera connected: {ModelName} (SN: {SerialNumber})",
                    _cachedCameraInfo.ModelName, _cachedCameraInfo.SerialNumber);
                return _cachedCameraInfo;
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                // Clean up partially created camera on failure
                _camera?.Dispose();
                _camera = null;
                // Heuristic mapping of SDK errors to DeviceNotFoundException; the pylon
                // SDK does not expose a dedicated "no device" exception type here.
                if (ex.Message.Contains("No device", StringComparison.OrdinalIgnoreCase)
                    || ex.Message.Contains("not found", StringComparison.OrdinalIgnoreCase)
                    || ex is InvalidOperationException)
                {
                    _logger.Error(ex, "No camera device found.");
                    throw new DeviceNotFoundException("No Basler camera device found.", ex);
                }
                _logger.Error(ex, "Failed to open camera.");
                throw new CameraException("Failed to open camera device.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void Close()
    {
        lock (_syncLock)
        {
            if (!_isConnected)
            {
                _logger.Information("Camera not connected, Close() ignored.");
                return;
            }
            try
            {
                if (_isGrabbing)
                {
                    StopGrabbingInternal();
                }
                _logger.Information("Closing camera connection...");
                _camera?.Close();
                _camera?.Dispose();
                _camera = null;
                _isConnected = false;
                _cachedCameraInfo = null;
                _logger.Information("Camera connection closed.");
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Error while closing camera.");
                // Fix: still release the SDK handle (best effort) before dropping the
                // reference, so a failed Close() does not leak the native device handle.
                try { _camera?.Dispose(); } catch { /* best effort */ }
                _camera = null;
                _isConnected = false;
                _isGrabbing = false;
                _cachedCameraInfo = null;
                throw new CameraException("Failed to close camera device.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void StartGrabbing()
    {
        lock (_syncLock)
        {
            EnsureConnected();
            if (_isGrabbing)
            {
                _logger.Information("Already grabbing, StartGrabbing() ignored.");
                return;
            }
            try
            {
                _logger.Information("Starting grabbing with software trigger...");
                // Register ImageGrabbed before starting so no frame is missed.
                _camera!.StreamGrabber!.ImageGrabbed += OnImageGrabbed;
                // Start grabbing using the grab loop thread provided by StreamGrabber
                _camera.StreamGrabber!.Start(GrabStrategy.OneByOne, GrabLoop.ProvidedByStreamGrabber);
                _isGrabbing = true;
                _logger.Information("Grabbing started.");
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                // Fix: unsubscribe on failure; otherwise a later successful StartGrabbing
                // would add a second handler and raise ImageGrabbed twice per frame.
                if (_camera?.StreamGrabber != null)
                {
                    _camera.StreamGrabber.ImageGrabbed -= OnImageGrabbed;
                }
                _logger.Error(ex, "Failed to start grabbing.");
                throw new CameraException("Failed to start grabbing.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void ExecuteSoftwareTrigger()
    {
        lock (_syncLock)
        {
            if (!_isGrabbing)
            {
                throw new InvalidOperationException("Cannot execute software trigger: camera is not grabbing.");
            }
            try
            {
                // Wait until the camera is ready to accept the next frame trigger
                if (!_camera!.WaitForFrameTriggerReady(1000, TimeoutHandling.Return))
                {
                    throw new TimeoutException("Camera was not ready for frame trigger within 1000 ms.");
                }
                _camera.ExecuteSoftwareTrigger();
                _logger.Debug("Software trigger executed.");
            }
            catch (TimeoutException)
            {
                throw; // Re-throw our own TimeoutException
            }
            catch (Exception ex) when (ex is not CameraException and not InvalidOperationException)
            {
                _logger.Error(ex, "Failed to execute software trigger.");
                throw new CameraException("Failed to execute software trigger.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void StopGrabbing()
    {
        lock (_syncLock)
        {
            if (!_isGrabbing)
            {
                return;
            }
            StopGrabbingInternal();
        }
    }

    /// <inheritdoc />
    public double GetExposureTime()
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                return _camera!.Parameters[PLCamera.ExposureTime].GetValue();
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to get exposure time.");
                throw new CameraException("Failed to get exposure time.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void SetExposureTime(double microseconds)
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                _camera!.Parameters[PLCamera.ExposureTime].SetValue(microseconds);
                _logger.Information("Exposure time set to {Microseconds} µs.", microseconds);
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to set exposure time to {Microseconds} µs.", microseconds);
                throw new CameraException("Failed to set exposure time.", ex);
            }
        }
    }

    /// <inheritdoc />
    public double GetGain()
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                return _camera!.Parameters[PLCamera.Gain].GetValue();
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to get gain.");
                throw new CameraException("Failed to get gain.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void SetGain(double value)
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                _camera!.Parameters[PLCamera.Gain].SetValue(value);
                _logger.Information("Gain set to {Value}.", value);
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to set gain to {Value}.", value);
                throw new CameraException("Failed to set gain.", ex);
            }
        }
    }

    /// <inheritdoc />
    public int GetWidth()
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                return (int)_camera!.Parameters[PLCamera.Width].GetValue();
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to get width.");
                throw new CameraException("Failed to get width.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void SetWidth(int value)
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                // IntegerValueCorrection.Nearest snaps the request to the closest value
                // the device actually supports (increment/min/max constraints).
                _camera!.Parameters[PLCamera.Width].SetValue(value, IntegerValueCorrection.Nearest);
                _logger.Information("Width set to {Value}.", value);
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to set width to {Value}.", value);
                throw new CameraException("Failed to set width.", ex);
            }
        }
    }

    /// <inheritdoc />
    public int GetHeight()
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                return (int)_camera!.Parameters[PLCamera.Height].GetValue();
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to get height.");
                throw new CameraException("Failed to get height.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void SetHeight(int value)
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                _camera!.Parameters[PLCamera.Height].SetValue(value, IntegerValueCorrection.Nearest);
                _logger.Information("Height set to {Value}.", value);
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to set height to {Value}.", value);
                throw new CameraException("Failed to set height.", ex);
            }
        }
    }

    /// <inheritdoc />
    public string GetPixelFormat()
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                return _camera!.Parameters[PLCamera.PixelFormat].GetValue();
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to get pixel format.");
                throw new CameraException("Failed to get pixel format.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void SetPixelFormat(string format)
    {
        lock (_syncLock)
        {
            EnsureConnected();
            try
            {
                _camera!.Parameters[PLCamera.PixelFormat].SetValue(format);
                _logger.Information("Pixel format set to {Format}.", format);
            }
            catch (Exception ex) when (ex is not CameraException)
            {
                _logger.Error(ex, "Failed to set pixel format to {Format}.", format);
                throw new CameraException("Failed to set pixel format.", ex);
            }
        }
    }

    /// <inheritdoc />
    public void Dispose()
    {
        Close();
        GC.SuppressFinalize(this);
    }

    /// <summary>
    /// Handles StreamGrabber.ImageGrabbed. Runs on the StreamGrabber callback thread
    /// and does not take <c>_syncLock</c>.
    /// </summary>
    /// <remarks>
    /// <para>On a successful grab, extracts pixel data, dimensions and pixel format and
    /// raises <see cref="ImageGrabbed"/>.</para>
    /// <para>On a failed grab, extracts the error code/description and raises
    /// <see cref="GrabError"/>.</para>
    /// <para>Callers that need to touch the WPF UI must marshal via a Dispatcher themselves.</para>
    /// </remarks>
    private void OnImageGrabbed(object? sender, Basler.Pylon.ImageGrabbedEventArgs e)
    {
        try
        {
            IGrabResult grabResult = e.GrabResult;
            if (grabResult.GrabSucceeded)
            {
                byte[] pixelData = grabResult.PixelData as byte[] ?? Array.Empty<byte>();
                int width = grabResult.Width;
                int height = grabResult.Height;
                string pixelFormat = grabResult.PixelTypeValue.ToString();
                var args = new CameraImageGrabbedEventArgs(pixelData, width, height, pixelFormat);
                ImageGrabbed?.Invoke(this, args);
            }
            else
            {
                int errorCode = (int)grabResult.ErrorCode;
                string errorDescription = grabResult.ErrorDescription ?? "Unknown grab error.";
                _logger.Error("Image grab failed. ErrorCode: {ErrorCode}, Description: {ErrorDescription}",
                    errorCode, errorDescription);
                var args = new CameraGrabErrorEventArgs(errorCode, errorDescription);
                GrabError?.Invoke(this, args);
            }
        }
        catch (Exception ex)
        {
            // Never let a subscriber exception escape into the SDK callback thread.
            _logger.Error(ex, "Exception in OnImageGrabbed handler.");
        }
    }

    /// <summary>
    /// Handles ConnectionLost. Runs on the pylon SDK event thread.
    /// </summary>
    private void OnConnectionLost(object? sender, EventArgs e)
    {
        _logger.Warning("Camera connection lost.");
        lock (_syncLock)
        {
            _isGrabbing = false;
            _isConnected = false;
            _cachedCameraInfo = null;
        }
        // Raise event outside lock to avoid deadlock
        ConnectionLost?.Invoke(this, EventArgs.Empty);
    }

    /// <summary>
    /// Internal stop grabbing without lock (caller must hold _syncLock).
    /// </summary>
    private void StopGrabbingInternal()
    {
        if (!_isGrabbing)
            return;
        try
        {
            _camera?.StreamGrabber?.Stop();
            _logger.Information("Grabbing stopped.");
        }
        catch (Exception ex) when (ex is not CameraException)
        {
            _logger.Error(ex, "Error while stopping grabbing.");
            throw new CameraException("Failed to stop grabbing.", ex);
        }
        finally
        {
            // Fix: always unhook the handler and clear the flag, even when Stop()
            // throws, so a subsequent StartGrabbing does not double-subscribe.
            if (_camera?.StreamGrabber != null)
            {
                _camera.StreamGrabber.ImageGrabbed -= OnImageGrabbed;
            }
            _isGrabbing = false;
        }
    }

    /// <summary>
    /// Throws <see cref="InvalidOperationException"/> if the camera is not connected.
    /// Must be called within a lock on <see cref="_syncLock"/>.
    /// </summary>
    private void EnsureConnected()
    {
        if (!_isConnected)
        {
            throw new InvalidOperationException("Camera is not connected. Call Open() first.");
        }
    }
}
+25
View File
@@ -0,0 +1,25 @@
namespace XP.Camera;
/// <summary>Base exception for all camera-related errors.</summary>
public class CameraException : Exception
{
    /// <summary>Creates the exception with a default message.</summary>
    public CameraException()
    {
    }

    /// <summary>Creates the exception with the given message.</summary>
    public CameraException(string message)
        : base(message)
    {
    }

    /// <summary>Creates the exception with a message and the underlying cause.</summary>
    public CameraException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}

/// <summary>Thrown when the camera connection is lost unexpectedly.</summary>
public class ConnectionLostException : CameraException
{
    /// <summary>Creates the exception with a default message.</summary>
    public ConnectionLostException()
    {
    }

    /// <summary>Creates the exception with the given message.</summary>
    public ConnectionLostException(string message)
        : base(message)
    {
    }

    /// <summary>Creates the exception with a message and the underlying cause.</summary>
    public ConnectionLostException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}

/// <summary>Thrown when no camera device is available on the system.</summary>
public class DeviceNotFoundException : CameraException
{
    /// <summary>Creates the exception with a default message.</summary>
    public DeviceNotFoundException()
    {
    }

    /// <summary>Creates the exception with the given message.</summary>
    public DeviceNotFoundException(string message)
        : base(message)
    {
    }

    /// <summary>Creates the exception with a message and the underlying cause.</summary>
    public DeviceNotFoundException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}
+18
View File
@@ -0,0 +1,18 @@
namespace XP.Camera;
/// <summary>
/// Unified camera factory that creates the controller matching a brand name.
/// </summary>
public class CameraFactory : ICameraFactory
{
    /// <inheritdoc />
    public ICameraController CreateController(string cameraType)
    {
        if (cameraType == "Basler")
        {
            return new BaslerCameraController();
        }
        // Additional brands (e.g. "Hikvision" => new HikvisionCameraController())
        // can be registered here once implemented.
        throw new NotSupportedException($"不支持的相机品牌: {cameraType}");
    }
}
+39
View File
@@ -0,0 +1,39 @@
namespace XP.Camera;
/// <summary>Immutable camera device information.</summary>
public record CameraInfo(
    string ModelName,
    string SerialNumber,
    string VendorName,
    string DeviceType
);

/// <summary>Event payload raised when one frame has been grabbed successfully.</summary>
public class ImageGrabbedEventArgs : EventArgs
{
    /// <summary>Creates the payload for one grabbed frame.</summary>
    public ImageGrabbedEventArgs(byte[] pixelData, int width, int height, string pixelFormat)
    {
        PixelData = pixelData;
        Width = width;
        Height = height;
        PixelFormat = pixelFormat;
    }

    /// <summary>Raw pixel buffer of the frame.</summary>
    public byte[] PixelData { get; }

    /// <summary>Frame width in pixels.</summary>
    public int Width { get; }

    /// <summary>Frame height in pixels.</summary>
    public int Height { get; }

    /// <summary>Pixel format name reported by the device.</summary>
    public string PixelFormat { get; }
}

/// <summary>Event payload raised when a grab attempt fails.</summary>
public class GrabErrorEventArgs : EventArgs
{
    /// <summary>Creates the payload describing one failed grab.</summary>
    public GrabErrorEventArgs(int errorCode, string errorDescription)
    {
        ErrorCode = errorCode;
        ErrorDescription = errorDescription;
    }

    /// <summary>Numeric error code reported by the device/SDK.</summary>
    public int ErrorCode { get; }

    /// <summary>Human-readable error description.</summary>
    public string ErrorDescription { get; }
}
+53
View File
@@ -0,0 +1,53 @@
namespace XP.Camera;
/// <summary>
/// Camera controller interface: a brand-agnostic contract for camera operations.
/// </summary>
/// <remarks>
/// <para>All public methods (Open/Close/StartGrabbing/StopGrabbing/ExecuteSoftwareTrigger and
/// parameter getters/setters) are guaranteed to be thread-safe by implementations.</para>
/// <para>Event callbacks fire on non-UI threads; WPF callers must marshal updates through a Dispatcher.</para>
/// </remarks>
public interface ICameraController : IDisposable
{
    /// <summary>True while a device connection is open.</summary>
    bool IsConnected { get; }
    /// <summary>True while image acquisition is running.</summary>
    bool IsGrabbing { get; }
    /// <summary>Opens the camera connection and returns device information.</summary>
    CameraInfo Open();
    /// <summary>Closes the camera connection and releases resources.</summary>
    void Close();
    /// <summary>Starts image acquisition in software-trigger mode.</summary>
    void StartGrabbing();
    /// <summary>Sends one software trigger to capture a single frame.</summary>
    void ExecuteSoftwareTrigger();
    /// <summary>Stops image acquisition.</summary>
    void StopGrabbing();
    /// <summary>Gets the exposure time in microseconds.</summary>
    double GetExposureTime();
    /// <summary>Sets the exposure time in microseconds.</summary>
    void SetExposureTime(double microseconds);
    /// <summary>Gets the gain value.</summary>
    double GetGain();
    /// <summary>Sets the gain value.</summary>
    void SetGain(double value);
    /// <summary>Gets the image width in pixels.</summary>
    int GetWidth();
    /// <summary>Sets the image width in pixels (implementations may snap to a valid value).</summary>
    void SetWidth(int value);
    /// <summary>Gets the image height in pixels.</summary>
    int GetHeight();
    /// <summary>Sets the image height in pixels (implementations may snap to a valid value).</summary>
    void SetHeight(int value);
    /// <summary>Gets the pixel format name.</summary>
    string GetPixelFormat();
    /// <summary>Sets the pixel format by name.</summary>
    void SetPixelFormat(string format);
    /// <summary>Raised after each successfully grabbed frame.</summary>
    event EventHandler<ImageGrabbedEventArgs> ImageGrabbed;
    /// <summary>Raised when a grab attempt fails.</summary>
    event EventHandler<GrabErrorEventArgs> GrabError;
    /// <summary>Raised when the camera connection is lost unexpectedly.</summary>
    event EventHandler ConnectionLost;
}
/// <summary>
/// Camera factory interface: creates controller instances by brand.
/// </summary>
public interface ICameraFactory
{
    /// <summary>Creates a controller instance for the given camera brand.</summary>
    ICameraController CreateController(string cameraType);
}
+34
View File
@@ -0,0 +1,34 @@
using System.Windows.Media;
using System.Windows.Media.Imaging;
namespace XP.Camera;
/// <summary>
/// Utility methods for converting raw pixel data into WPF <see cref="BitmapSource"/> objects.
/// </summary>
public static class PixelConverter
{
    /// <summary>
    /// Converts a raw pixel buffer into a WPF <see cref="BitmapSource"/>.
    /// The returned bitmap is frozen (Freeze() has been called) and is therefore
    /// safe to pass across threads.
    /// </summary>
    /// <param name="pixelData">Raw pixel buffer; must contain at least stride * height bytes.</param>
    /// <param name="width">Image width in pixels; must be positive.</param>
    /// <param name="height">Image height in pixels; must be positive.</param>
    /// <param name="pixelFormat">One of "Mono8", "BGR8" or "BGRA8".</param>
    /// <returns>A frozen <see cref="BitmapSource"/> built from the buffer.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="pixelData"/> or <paramref name="pixelFormat"/> is null.</exception>
    /// <exception cref="ArgumentException">Width/height is non-positive, or the buffer is too small.</exception>
    /// <exception cref="NotSupportedException">The pixel format is not one of the supported names.</exception>
    public static BitmapSource ToBitmapSource(byte[] pixelData, int width, int height, string pixelFormat)
    {
        ArgumentNullException.ThrowIfNull(pixelData);
        if (width <= 0) throw new ArgumentException("Width must be a positive integer.", nameof(width));
        if (height <= 0) throw new ArgumentException("Height must be a positive integer.", nameof(height));
        ArgumentNullException.ThrowIfNull(pixelFormat);

        // Map the device pixel-format name to the WPF format and row stride in bytes.
        var (format, stride) = pixelFormat switch
        {
            "Mono8" => (PixelFormats.Gray8, width),
            "BGR8" => (PixelFormats.Bgr24, width * 3),
            "BGRA8" => (PixelFormats.Bgra32, width * 4),
            _ => throw new NotSupportedException($"Pixel format '{pixelFormat}' is not supported.")
        };

        // Fix: fail fast with a clear message instead of letting BitmapSource.Create
        // reject a short buffer; long math avoids int overflow for very large images.
        long requiredBytes = (long)stride * height;
        if (pixelData.Length < requiredBytes)
        {
            throw new ArgumentException(
                $"Pixel buffer too small: expected at least {requiredBytes} bytes but got {pixelData.Length}.",
                nameof(pixelData));
        }

        var bitmap = BitmapSource.Create(width, height, 96, 96, format, null, pixelData, stride);
        bitmap.Freeze(); // allow cross-thread access
        return bitmap;
    }
}
+242
View File
@@ -0,0 +1,242 @@
# XP.Camera 使用说明
基于 .NET 8 WPF 的工业相机控制类库,采用工厂模式 + 统一接口设计,支持多品牌相机扩展。当前已实现 Basler pylon SDK 驱动。
## 环境要求
- .NET 8 SDK
- Windows 操作系统
- Basler pylon 8 SDK(已安装并配置环境变量)
## 项目结构
```
XP.Camera/
├── ICameraController.cs # 控制器接口 + 工厂接口
├── CameraFactory.cs # 统一工厂(根据品牌创建控制器)
├── BaslerCameraController.cs # Basler 实现
├── CameraModels.cs # CameraInfo、ImageGrabbedEventArgs、GrabErrorEventArgs
├── CameraExceptions.cs # CameraException、ConnectionLostException、DeviceNotFoundException
├── PixelConverter.cs # 像素数据 → WPF BitmapSource 转换工具
└── XP.Camera.csproj
```
所有类型统一在 `XP.Camera` 命名空间下。
## 项目引用
```xml
<ProjectReference Include="..\XP.Camera\XP.Camera.csproj" />
```
## 快速开始
### 1. 通过工厂创建控制器
```csharp
using XP.Camera;
ICameraFactory factory = new CameraFactory();
using ICameraController camera = factory.CreateController("Basler");
CameraInfo info = camera.Open();
Console.WriteLine($"已连接: {info.ModelName} (SN: {info.SerialNumber})");
```
### 2. 依赖注入方式(推荐)
在 Prism / DI 容器中注册:
```csharp
// App.xaml.cs
var config = AppConfig.Load();
containerRegistry.RegisterSingleton<ICameraFactory, CameraFactory>();
containerRegistry.RegisterSingleton<ICameraController>(() =>
new CameraFactory().CreateController(config.CameraType));
```
ViewModel 中注入使用:
```csharp
public class MyViewModel
{
private readonly ICameraController _camera;
public MyViewModel(ICameraController camera)
{
_camera = camera;
}
}
```
相机品牌通过配置文件 `config.json` 指定:
```json
{
"CameraType": "Basler"
}
```
### 3. 实时图像显示(WPF 绑定)
```csharp
_camera.ImageGrabbed += (s, e) =>
{
// PixelConverter 返回已 Freeze 的 BitmapSource,可跨线程传递
var bitmap = PixelConverter.ToBitmapSource(
e.PixelData, e.Width, e.Height, e.PixelFormat);
Application.Current.Dispatcher.Invoke(() =>
{
CameraImageSource = bitmap;
});
};
```
XAML 绑定:
```xml
<Image Source="{Binding CameraImageSource}" Stretch="Uniform" />
```
### 4. 软件触发采集流程
```csharp
camera.Open();
camera.SetExposureTime(10000); // 10ms
camera.StartGrabbing();
// 每次需要采集时调用(结果通过 ImageGrabbed 事件返回)
camera.ExecuteSoftwareTrigger();
camera.StopGrabbing();
camera.Close();
```
### 5. 实时连续采集(链式触发)
收到上一帧后立即触发下一帧,自动适配任何帧率:
```csharp
private volatile bool _liveViewRunning;
_camera.ImageGrabbed += (s, e) =>
{
var bitmap = PixelConverter.ToBitmapSource(e.PixelData, e.Width, e.Height, e.PixelFormat);
Application.Current.Dispatcher.Invoke(() => CameraImageSource = bitmap);
if (_liveViewRunning)
_camera.ExecuteSoftwareTrigger(); // 链式触发下一帧
};
// 启动实时
_camera.StartGrabbing();
_liveViewRunning = true;
_camera.ExecuteSoftwareTrigger(); // 触发第一帧
// 停止实时
_liveViewRunning = false;
```
## 核心接口
### ICameraController
| 方法 | 说明 |
|------|------|
| `Open()` | 打开连接,返回 `CameraInfo` |
| `Close()` | 关闭连接(自动停止采集) |
| `StartGrabbing()` | 以软件触发模式启动采集 |
| `ExecuteSoftwareTrigger()` | 触发一帧采集 |
| `StopGrabbing()` | 停止采集 |
### 参数读写
| 方法 | 说明 |
|------|------|
| `Get/SetExposureTime(double)` | 曝光时间(微秒) |
| `Get/SetGain(double)` | 增益值 |
| `Get/SetWidth(int)` | 图像宽度(自动校正到有效值) |
| `Get/SetHeight(int)` | 图像高度(自动校正到有效值) |
| `Get/SetPixelFormat(string)` | 像素格式(Mono8 / BGR8 / BGRA8) |
### ICameraFactory
| 方法 | 说明 |
|------|------|
| `CreateController(string cameraType)` | 根据品牌名创建控制器 |
当前支持的 `cameraType` 值:`"Basler"`
## 事件
| 事件 | 说明 | 触发线程 |
|------|------|----------|
| `ImageGrabbed` | 成功采集一帧图像 | StreamGrabber 回调线程 |
| `GrabError` | 图像采集失败 | StreamGrabber 回调线程 |
| `ConnectionLost` | 相机连接意外断开 | pylon SDK 事件线程 |
> 所有事件均在非 UI 线程触发。更新 WPF 界面时需通过 `Dispatcher.Invoke` 调度。
> `PixelConverter.ToBitmapSource()` 返回的 BitmapSource 已调用 `Freeze()`,可直接跨线程传递。
## 异常处理
```csharp
try
{
camera.Open();
}
catch (DeviceNotFoundException)
{
// 无可用相机设备
}
catch (CameraException ex)
{
// 其他相机错误,ex.InnerException 包含原始 SDK 异常
}
```
| 异常类型 | 场景 |
|---------|------|
| `DeviceNotFoundException` | 无可用相机 |
| `ConnectionLostException` | 相机物理断开 |
| `CameraException` | SDK 操作失败(基类) |
| `InvalidOperationException` | 未连接时访问参数,未采集时触发 |
| `TimeoutException` | 软件触发等待超时 |
## 扩展其他品牌相机
1. 实现 `ICameraController` 接口:
```csharp
public class HikvisionCameraController : ICameraController
{
// 实现所有接口方法...
}
```
2. 在 `CameraFactory.cs` 中注册:
```csharp
public ICameraController CreateController(string cameraType)
{
return cameraType switch
{
"Basler" => new BaslerCameraController(),
"Hikvision" => new HikvisionCameraController(),
_ => throw new NotSupportedException($"不支持的相机品牌: {cameraType}")
};
}
```
3. 配置文件切换品牌即可,业务代码无需修改。
## 线程安全
- 所有公共方法(Open / Close / StartGrabbing / StopGrabbing / ExecuteSoftwareTrigger / 参数读写)均线程安全
- 事件回调不持有内部锁,不会导致死锁
- `Open()` / `Close()` 幂等,重复调用安全
## 日志
使用 Serilog 静态 API(`Log.ForContext<T>()`),与宿主应用共享同一个日志管道。宿主应用只需在启动时配置 `Log.Logger` 即可。
+19
View File
@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0-windows</TargetFramework>
<UseWPF>true</UseWPF>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<RootNamespace>XP.Camera</RootNamespace>
<AssemblyName>XP.Camera</AssemblyName>
</PropertyGroup>
<ItemGroup>
<Reference Include="Basler.Pylon">
<HintPath>C:\Program Files\Basler\pylon 8\Development\Assemblies\Basler.Pylon\x64\Basler.Pylon.dll</HintPath>
</Reference>
<PackageReference Include="Serilog" Version="4.3.1" />
</ItemGroup>
</Project>
+93
View File
@@ -7,6 +7,39 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "XplorePlane", "XplorePlane\
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "XplorePlane.Tests", "XplorePlane.Tests\XplorePlane.Tests.csproj", "{6234B622-8DF2-4A8D-AF93-B17774019555}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "XP.Camera", "XP.Camera\XP.Camera.csproj", "{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ImageProcessing.Core", "ImageProcessing.Core\ImageProcessing.Core.csproj", "{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ImageProcessing.Processors", "ImageProcessing.Processors\ImageProcessing.Processors.csproj", "{2687E12E-3053-E1C6-5268-E4FF547EC212}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ImageProcessing.Controls", "ImageProcessing.Controls\ImageProcessing.Controls.csproj", "{9460CF45-8A25-9770-03AF-4602A2FFF016}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ImageROIControl", "ImageROIControl\ImageROIControl.csproj", "{57061533-EC58-1B1C-3862-9164BC73C806}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ExternalLibraries", "ExternalLibraries", "{02EA681E-C7D8-13C7-8484-4AC65E1B71E8}"
ProjectSection(SolutionItems) = preProject
ExternalLibraries\concrt140.dll = ExternalLibraries\concrt140.dll
ExternalLibraries\config.json = ExternalLibraries\config.json
ExternalLibraries\cvextern.dll = ExternalLibraries\cvextern.dll
ExternalLibraries\Models\EDSR_x2.onnx = ExternalLibraries\Models\EDSR_x2.onnx
ExternalLibraries\Models\EDSR_x3.onnx = ExternalLibraries\Models\EDSR_x3.onnx
ExternalLibraries\Models\EDSR_x4.onnx = ExternalLibraries\Models\EDSR_x4.onnx
ExternalLibraries\Models\FSRCNN_x2.onnx = ExternalLibraries\Models\FSRCNN_x2.onnx
ExternalLibraries\Models\FSRCNN_x3.onnx = ExternalLibraries\Models\FSRCNN_x3.onnx
ExternalLibraries\Models\FSRCNN_x4.onnx = ExternalLibraries\Models\FSRCNN_x4.onnx
ExternalLibraries\libusb-1.0.dll = ExternalLibraries\libusb-1.0.dll
ExternalLibraries\msvcp140.dll = ExternalLibraries\msvcp140.dll
ExternalLibraries\msvcp140_1.dll = ExternalLibraries\msvcp140_1.dll
ExternalLibraries\msvcp140_2.dll = ExternalLibraries\msvcp140_2.dll
ExternalLibraries\msvcp140_atomic_wait.dll = ExternalLibraries\msvcp140_atomic_wait.dll
ExternalLibraries\msvcp140_codecvt_ids.dll = ExternalLibraries\msvcp140_codecvt_ids.dll
ExternalLibraries\opencv_videoio_ffmpeg490_64.dll = ExternalLibraries\opencv_videoio_ffmpeg490_64.dll
ExternalLibraries\vcruntime140.dll = ExternalLibraries\vcruntime140.dll
ExternalLibraries\vcruntime140_1.dll = ExternalLibraries\vcruntime140_1.dll
ExternalLibraries\version_string.inc = ExternalLibraries\version_string.inc
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -41,6 +74,66 @@ Global
{6234B622-8DF2-4A8D-AF93-B17774019555}.Release|x64.Build.0 = Release|Any CPU
{6234B622-8DF2-4A8D-AF93-B17774019555}.Release|x86.ActiveCfg = Release|Any CPU
{6234B622-8DF2-4A8D-AF93-B17774019555}.Release|x86.Build.0 = Release|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Debug|Any CPU.Build.0 = Debug|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Debug|x64.ActiveCfg = Debug|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Debug|x64.Build.0 = Debug|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Debug|x86.ActiveCfg = Debug|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Debug|x86.Build.0 = Debug|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Release|Any CPU.ActiveCfg = Release|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Release|Any CPU.Build.0 = Release|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Release|x64.ActiveCfg = Release|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Release|x64.Build.0 = Release|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Release|x86.ActiveCfg = Release|Any CPU
{82762CDE-48CC-4E28-ABEC-1FC752BACEF4}.Release|x86.Build.0 = Release|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Debug|x64.ActiveCfg = Debug|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Debug|x64.Build.0 = Debug|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Debug|x86.ActiveCfg = Debug|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Debug|x86.Build.0 = Debug|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Release|Any CPU.Build.0 = Release|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Release|x64.ActiveCfg = Release|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Release|x64.Build.0 = Release|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Release|x86.ActiveCfg = Release|Any CPU
{01EDC1D8-F6BC-2677-AE59-89BA3FC2C74F}.Release|x86.Build.0 = Release|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Debug|Any CPU.Build.0 = Debug|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Debug|x64.ActiveCfg = Debug|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Debug|x64.Build.0 = Debug|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Debug|x86.ActiveCfg = Debug|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Debug|x86.Build.0 = Debug|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Release|Any CPU.ActiveCfg = Release|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Release|Any CPU.Build.0 = Release|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Release|x64.ActiveCfg = Release|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Release|x64.Build.0 = Release|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Release|x86.ActiveCfg = Release|Any CPU
{2687E12E-3053-E1C6-5268-E4FF547EC212}.Release|x86.Build.0 = Release|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Debug|x64.ActiveCfg = Debug|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Debug|x64.Build.0 = Debug|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Debug|x86.ActiveCfg = Debug|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Debug|x86.Build.0 = Debug|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Release|Any CPU.Build.0 = Release|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Release|x64.ActiveCfg = Release|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Release|x64.Build.0 = Release|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Release|x86.ActiveCfg = Release|Any CPU
{9460CF45-8A25-9770-03AF-4602A2FFF016}.Release|x86.Build.0 = Release|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Debug|Any CPU.Build.0 = Debug|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Debug|x64.ActiveCfg = Debug|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Debug|x64.Build.0 = Debug|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Debug|x86.ActiveCfg = Debug|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Debug|x86.Build.0 = Debug|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Release|Any CPU.ActiveCfg = Release|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Release|Any CPU.Build.0 = Release|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Release|x64.ActiveCfg = Release|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Release|x64.Build.0 = Release|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Release|x86.ActiveCfg = Release|Any CPU
{57061533-EC58-1B1C-3862-9164BC73C806}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
+25
View File
@@ -12,8 +12,10 @@ using XplorePlane.Services.Cnc;
using XplorePlane.Services.Matrix;
using XplorePlane.Services.Measurement;
using XplorePlane.Services.Recipe;
using XplorePlane.Services.Camera;
using XplorePlane.ViewModels.Cnc;
using XplorePlane.Views.Cnc;
using XP.Camera;
using Prism.Ioc;
using Prism.DryIoc;
using Prism.Modularity;
@@ -146,6 +148,22 @@ namespace XplorePlane
Log.Error(ex, "射线源资源释放失败");
}
// 释放相机服务资源
try
{
var bootstrapper = AppBootstrapper.Instance;
if (bootstrapper != null)
{
var cameraService = bootstrapper.Container.Resolve<ICameraService>();
cameraService?.Dispose();
Log.Information("相机服务资源已释放");
}
}
catch (Exception ex)
{
Log.Error(ex, "相机服务资源释放失败");
}
Log.CloseAndFlush();
base.OnExit(e);
}
@@ -211,6 +229,7 @@ namespace XplorePlane
containerRegistry.RegisterForNavigation<MainWindow>();
containerRegistry.RegisterForNavigation<MainWindowB>();
containerRegistry.Register<MainViewModel>();
containerRegistry.RegisterSingleton<NavigationPropertyPanelViewModel>();
// 注册图像处理服务与视图
containerRegistry.RegisterSingleton<IImageProcessingService, ImageProcessingService>();
@@ -266,6 +285,12 @@ namespace XplorePlane
containerRegistry.RegisterForNavigation<CncPageView>();
containerRegistry.RegisterForNavigation<MatrixPageView>();
// ── 相机服务(单例)──
containerRegistry.RegisterSingleton<ICameraFactory, CameraFactory>();
containerRegistry.RegisterSingleton<ICameraController>(() =>
new CameraFactory().CreateController("Basler"));
containerRegistry.RegisterSingleton<ICameraService, CameraService>();
Log.Information("依赖注入容器配置完成");
}
@@ -0,0 +1,194 @@
using System;
using System.Diagnostics;
using System.Windows;
using System.Windows.Media.Imaging;
using XP.Camera;
using XP.Common.Logging.Interfaces;
using XplorePlane.Models;
using XplorePlane.Services.AppState;
namespace XplorePlane.Services.Camera
{
    /// <summary>
    /// Camera management service: wraps an <see cref="ICameraController"/> and
    /// mirrors connection/streaming state into <see cref="IAppStateService"/>.
    /// Frames are forwarded to subscribers on the UI dispatcher.
    /// </summary>
    public class CameraService : ICameraService
    {
        private readonly ICameraController _controller;
        private readonly IAppStateService _appState;
        private readonly ILoggerService _logger;

        // Volatile: toggled from the caller's thread, read on the grab callback thread.
        private volatile bool _liveViewRunning;
        private bool _disposed;

        // Frame-rate bookkeeping: average FPS since StartGrabbing().
        private readonly Stopwatch _fpsStopwatch = new();
        private int _frameCount;

        public bool IsConnected => _controller.IsConnected;
        public bool IsGrabbing => _controller.IsGrabbing;
        public bool IsLiveView => _liveViewRunning;

        /// <summary>Raised (via the UI dispatcher) with the latest converted frame.</summary>
        public event EventHandler<BitmapSource> FrameArrived;

        /// <summary>Raised (via the UI dispatcher) when the camera drops unexpectedly.</summary>
        public event EventHandler ConnectionLost;

        /// <summary>
        /// Creates the service and subscribes to controller events for the service's lifetime
        /// (unsubscribed again in <see cref="Dispose"/>).
        /// </summary>
        /// <exception cref="ArgumentNullException">Any dependency is null.</exception>
        public CameraService(
            ICameraController controller,
            IAppStateService appState,
            ILoggerService logger)
        {
            _controller = controller ?? throw new ArgumentNullException(nameof(controller));
            _appState = appState ?? throw new ArgumentNullException(nameof(appState));
            _logger = logger?.ForModule<CameraService>() ?? throw new ArgumentNullException(nameof(logger));

            _controller.ImageGrabbed += OnImageGrabbed;
            _controller.GrabError += OnGrabError;
            _controller.ConnectionLost += OnConnectionLost;
        }

        /// <summary>Opens the camera and publishes the connected (not yet streaming) state.</summary>
        /// <returns>Device information reported by the controller.</returns>
        public CameraInfo Connect()
        {
            var info = _controller.Open();
            _logger.Info("相机已连接: {ModelName} (SN: {SerialNumber})", info.ModelName, info.SerialNumber);

            // (Fixed: the old code also built an unused "{W}x{H}" resolution string here.)
            _appState.UpdateCameraState(new CameraState(
                IsConnected: true,
                IsStreaming: false,
                CurrentFrame: null,
                Width: _controller.GetWidth(),
                Height: _controller.GetHeight(),
                FrameRate: 0));
            return info;
        }

        /// <summary>Stops live view, closes the camera and resets the published state.</summary>
        public void Disconnect()
        {
            _liveViewRunning = false;
            _controller.Close();
            _appState.UpdateCameraState(CameraState.Default);
            _logger.Info("相机已断开");
        }

        /// <summary>Starts acquisition and resets the FPS counters.</summary>
        public void StartGrabbing()
        {
            _controller.StartGrabbing();
            _fpsStopwatch.Restart();
            _frameCount = 0;
            UpdateStreamingState(true);
        }

        /// <summary>Fires a single software trigger (one frame).</summary>
        public void TriggerOnce()
        {
            _controller.ExecuteSoftwareTrigger();
        }

        /// <summary>Stops acquisition (also ends any chained live-view triggering).</summary>
        public void StopGrabbing()
        {
            _liveViewRunning = false;
            _controller.StopGrabbing();
            _fpsStopwatch.Stop();
            UpdateStreamingState(false);
        }

        /// <summary>
        /// Starts live preview: ensures grabbing is active, then kicks off the
        /// trigger chain (each received frame re-triggers in <see cref="OnImageGrabbed"/>).
        /// </summary>
        public void StartLiveView()
        {
            if (!_controller.IsGrabbing)
                StartGrabbing();

            _liveViewRunning = true;
            _controller.ExecuteSoftwareTrigger();
            _logger.Info("实时预览已启动");
        }

        /// <summary>Stops live preview; grabbing itself stays active.</summary>
        public void StopLiveView()
        {
            _liveViewRunning = false;
            _logger.Info("实时预览已停止");
        }

        // ── Parameter accessors (delegated directly to the controller) ──
        public double GetExposureTime() => _controller.GetExposureTime();
        public void SetExposureTime(double microseconds) => _controller.SetExposureTime(microseconds);

        public double GetGain() => _controller.GetGain();
        public void SetGain(double value) => _controller.SetGain(value);

        public int GetWidth() => _controller.GetWidth();
        public void SetWidth(int value) => _controller.SetWidth(value);

        public int GetHeight() => _controller.GetHeight();
        public void SetHeight(int value) => _controller.SetHeight(value);

        public string GetPixelFormat() => _controller.GetPixelFormat();
        public void SetPixelFormat(string format) => _controller.SetPixelFormat(format);

        // ── Event handlers ──

        /// <summary>
        /// Converts the raw frame, updates global state/FPS, notifies UI subscribers
        /// and — in live view — chains the next software trigger.
        /// </summary>
        private void OnImageGrabbed(object sender, ImageGrabbedEventArgs e)
        {
            try
            {
                var bitmap = PixelConverter.ToBitmapSource(e.PixelData, e.Width, e.Height, e.PixelFormat);

                // Average frame rate since StartGrabbing().
                _frameCount++;
                double fps = 0;
                if (_fpsStopwatch.ElapsedMilliseconds > 0)
                    fps = _frameCount / (_fpsStopwatch.ElapsedMilliseconds / 1000.0);

                // Publish the frame into global application state.
                _appState.UpdateCameraState(new CameraState(
                    IsConnected: true,
                    IsStreaming: true,
                    CurrentFrame: bitmap,
                    Width: e.Width,
                    Height: e.Height,
                    FrameRate: fps));

                // Notify UI subscribers on the dispatcher.
                var app = Application.Current;
                app?.Dispatcher.BeginInvoke(() => FrameArrived?.Invoke(this, bitmap));

                // Chain-trigger the next frame while live view is active.
                if (_liveViewRunning)
                    _controller.ExecuteSoftwareTrigger();
            }
            catch (Exception ex)
            {
                _logger.Error(ex, "处理相机图像帧时出错");
            }
        }

        private void OnGrabError(object sender, GrabErrorEventArgs e)
        {
            _logger.Warn($"相机采集错误: ErrorCode={e.ErrorCode}, {e.ErrorDescription}");
        }

        /// <summary>Resets published state and forwards the loss event to the UI thread.</summary>
        private void OnConnectionLost(object sender, EventArgs e)
        {
            _liveViewRunning = false;
            _appState.UpdateCameraState(CameraState.Default);
            _logger.Warn("相机连接意外断开");

            var app = Application.Current;
            app?.Dispatcher.BeginInvoke(() => ConnectionLost?.Invoke(this, EventArgs.Empty));
        }

        /// <summary>Re-publishes the current camera state with only IsStreaming changed.</summary>
        private void UpdateStreamingState(bool isStreaming)
        {
            var current = _appState.CameraState;
            _appState.UpdateCameraState(current with { IsStreaming = isStreaming });
        }

        /// <summary>Detaches controller events and disposes the controller. Idempotent.</summary>
        public void Dispose()
        {
            if (_disposed) return;
            _disposed = true;
            _liveViewRunning = false;

            _controller.ImageGrabbed -= OnImageGrabbed;
            _controller.GrabError -= OnGrabError;
            _controller.ConnectionLost -= OnConnectionLost;
            _controller.Dispose();
            _logger.Info("CameraService 已释放");
        }
    }
}
@@ -0,0 +1,55 @@
using System;
using System.Windows.Media.Imaging;
using XP.Camera;
namespace XplorePlane.Services.Camera
{
    /// <summary>
    /// Camera management service contract: wraps an ICameraController and bridges
    /// camera state into the AppStateService. Disposing the service releases the camera.
    /// </summary>
    public interface ICameraService : IDisposable
    {
        // Connection / acquisition status flags.
        bool IsConnected { get; }
        bool IsGrabbing { get; }
        bool IsLiveView { get; }

        /// <summary>Connects the camera and returns its device information.</summary>
        CameraInfo Connect();

        /// <summary>Disconnects the camera.</summary>
        void Disconnect();

        /// <summary>Starts single-frame acquisition (software-trigger mode).</summary>
        void StartGrabbing();

        /// <summary>Fires one software trigger (acquires one frame).</summary>
        void TriggerOnce();

        /// <summary>Stops acquisition.</summary>
        void StopGrabbing();

        /// <summary>Starts live preview (chained software triggering).</summary>
        void StartLiveView();

        /// <summary>Stops live preview.</summary>
        void StopLiveView();

        // ── Parameter read/write (delegated to the underlying controller) ──
        double GetExposureTime();
        void SetExposureTime(double microseconds);

        double GetGain();
        void SetGain(double value);

        int GetWidth();
        void SetWidth(int value);

        int GetHeight();
        void SetHeight(int value);

        string GetPixelFormat();
        void SetPixelFormat(string format);

        /// <summary>Latest frame (frozen, safe to use across threads).</summary>
        event EventHandler<BitmapSource> FrameArrived;

        /// <summary>Raised when the camera connection is lost.</summary>
        event EventHandler ConnectionLost;
    }
}
@@ -22,12 +22,5 @@ namespace XplorePlane.Services
IProgress<double> progress = null,
CancellationToken cancellationToken = default);
Task<ushort[]> ProcessRawFrameAsync(
ushort[] pixelData,
int width,
int height,
string processorName,
IDictionary<string, object> parameters,
CancellationToken cancellationToken = default);
}
}
@@ -58,27 +58,5 @@ namespace XplorePlane.Services
return BitmapSource.Create(width, height, 96, 96, PixelFormats.Gray8, null, pixels, stride);
}
public static Image<Gray, ushort> ToEmguCV16(BitmapSource bitmapSource)
{
if (bitmapSource == null) throw new ArgumentNullException(nameof(bitmapSource));
var formatted = new FormatConvertedBitmap(bitmapSource, PixelFormats.Gray16, null, 0);
int width = formatted.PixelWidth;
int height = formatted.PixelHeight;
int stride = width * 2; // 2 bytes per pixel for 16-bit
byte[] rawBytes = new byte[height * stride];
formatted.CopyPixels(rawBytes, stride, 0);
ushort[] pixels = new ushort[width * height];
Buffer.BlockCopy(rawBytes, 0, pixels, 0, rawBytes.Length);
var image = new Image<Gray, ushort>(width, height);
// Copy pixel data row by row
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
image.Data[y, x, 0] = pixels[y * width + x];
return image;
}
}
}
@@ -16,13 +16,11 @@ namespace XplorePlane.Services
{
private readonly ILoggerService _logger;
private readonly ConcurrentDictionary<string, ImageProcessorBase> _processorRegistry;
private readonly ConcurrentDictionary<string, ImageProcessorBase16> _processorRegistry16;
public ImageProcessingService(ILoggerService logger)
{
_logger = logger?.ForModule<ImageProcessingService>() ?? throw new ArgumentNullException(nameof(logger));
_processorRegistry = new ConcurrentDictionary<string, ImageProcessorBase>();
_processorRegistry16 = new ConcurrentDictionary<string, ImageProcessorBase16>();
RegisterBuiltInProcessors();
}
@@ -39,20 +37,10 @@ namespace XplorePlane.Services
_processorRegistry["ShockFilter"] = new ShockFilterProcessor();
_processorRegistry["BandPassFilter"] = new BandPassFilterProcessor();
// 16-bit processors (separate registry due to different base class)
_processorRegistry16["GaussianBlur16"] = new GaussianBlurProcessor16();
_processorRegistry16["FlatFieldCorrection16"] = new FlatFieldCorrectionProcessor16();
_logger.Info("Registered {Count8} 8-bit and {Count16} 16-bit built-in image processors",
_processorRegistry.Count, _processorRegistry16.Count);
_logger.Info("Registered {Count} built-in image processors", _processorRegistry.Count);
}
public IReadOnlyList<string> GetAvailableProcessors()
{
var all = new List<string>(_processorRegistry.Keys);
all.AddRange(_processorRegistry16.Keys);
return all.AsReadOnly();
}
public IReadOnlyList<string> GetAvailableProcessors() => new List<string>(_processorRegistry.Keys).AsReadOnly();
public void RegisterProcessor(string name, ImageProcessorBase processor)
{
@@ -66,8 +54,6 @@ namespace XplorePlane.Services
{
if (_processorRegistry.TryGetValue(processorName, out var processor))
return processor.GetParameters().AsReadOnly();
if (_processorRegistry16.TryGetValue(processorName, out var processor16))
return processor16.GetParameters().AsReadOnly();
throw new ArgumentException($"Processor not registered: {processorName}", nameof(processorName));
}
@@ -82,8 +68,6 @@ namespace XplorePlane.Services
{
if (_processorRegistry.TryGetValue(processorName, out var p))
return string.IsNullOrWhiteSpace(p.Name) ? processorName : p.Name;
if (_processorRegistry16.TryGetValue(processorName, out var p16))
return string.IsNullOrWhiteSpace(p16.Name) ? processorName : p16.Name;
return processorName;
}
@@ -142,49 +126,6 @@ namespace XplorePlane.Services
}, cancellationToken);
}
public async Task<ushort[]> ProcessRawFrameAsync(
ushort[] pixelData,
int width,
int height,
string processorName,
IDictionary<string, object> parameters,
CancellationToken cancellationToken = default)
{
if (pixelData == null)
throw new ArgumentException("pixelData cannot be null", nameof(pixelData));
if (pixelData.Length != width * height)
throw new ArgumentException(
$"pixelData length {pixelData.Length} does not match width*height {width * height}");
if (!_processorRegistry16.TryGetValue(processorName, out var processor))
throw new ArgumentException($"Processor not registered: {processorName}", nameof(processorName));
return await Task.Run(() =>
{
cancellationToken.ThrowIfCancellationRequested();
var image = new Image<Gray, ushort>(width, height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
image.Data[y, x, 0] = pixelData[y * width + x];
if (parameters != null)
{
foreach (var kvp in parameters)
processor.SetParameter(kvp.Key, kvp.Value);
}
var processed = processor.Process(image);
var result = new ushort[width * height];
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
result[y * width + x] = processed.Data[y, x, 0];
return result;
}, cancellationToken);
}
public void Dispose()
{
foreach (var processor in _processorRegistry.Values)
@@ -194,12 +135,6 @@ namespace XplorePlane.Services
}
_processorRegistry.Clear();
foreach (var processor in _processorRegistry16.Values)
{
if (processor is IDisposable disposable)
disposable.Dispose();
}
_processorRegistry16.Clear();
}
}
}
@@ -0,0 +1,384 @@
using Prism.Commands;
using Prism.Mvvm;
using System;
using System.Collections.ObjectModel;
using System.Windows;
using System.Windows.Media.Imaging;
using XP.Camera;
using Serilog;
namespace XplorePlane.ViewModels
{
    /// <summary>
    /// Camera preview view model, ported from the camera-control logic of
    /// ImageProcessing.MainWindowViewModel. Drives connect/disconnect, grabbing,
    /// live view and parameter commands against an <see cref="ICameraController"/>.
    /// </summary>
    public class NavigationPropertyPanelViewModel : BindableBase, IDisposable
    {
        private static readonly ILogger _logger = Log.ForContext<NavigationPropertyPanelViewModel>();
        private readonly ICameraController _camera;

        // Volatile: set on the UI thread, read on the grab callback thread.
        private volatile bool _liveViewRunning;
        private bool _disposed;

        #region Properties

        private BitmapSource? _cameraImageSource;
        /// <summary>Latest camera frame shown in the preview.</summary>
        public BitmapSource? CameraImageSource
        {
            get => _cameraImageSource;
            set => SetProperty(ref _cameraImageSource, value);
        }

        private bool _isCameraConnected;
        /// <summary>Connection flag; changing it refreshes every command's CanExecute.</summary>
        public bool IsCameraConnected
        {
            get => _isCameraConnected;
            set
            {
                if (SetProperty(ref _isCameraConnected, value))
                {
                    ConnectCameraCommand.RaiseCanExecuteChanged();
                    DisconnectCameraCommand.RaiseCanExecuteChanged();
                    StartGrabCommand.RaiseCanExecuteChanged();
                    StopGrabCommand.RaiseCanExecuteChanged();
                    ApplyExposureCommand.RaiseCanExecuteChanged();
                    ApplyGainCommand.RaiseCanExecuteChanged();
                    ApplyWidthCommand.RaiseCanExecuteChanged();
                    ApplyHeightCommand.RaiseCanExecuteChanged();
                    ApplyPixelFormatCommand.RaiseCanExecuteChanged();
                    RefreshCameraParamsCommand.RaiseCanExecuteChanged();
                    OpenCameraSettingsCommand.RaiseCanExecuteChanged();
                }
            }
        }

        private bool _isCameraGrabbing;
        /// <summary>Acquisition flag; refreshes start/stop command availability.</summary>
        public bool IsCameraGrabbing
        {
            get => _isCameraGrabbing;
            set
            {
                if (SetProperty(ref _isCameraGrabbing, value))
                {
                    StartGrabCommand.RaiseCanExecuteChanged();
                    StopGrabCommand.RaiseCanExecuteChanged();
                }
            }
        }

        private string _cameraStatusText = "未连接";
        /// <summary>Human-readable status line shown in the UI.</summary>
        public string CameraStatusText
        {
            get => _cameraStatusText;
            set => SetProperty(ref _cameraStatusText, value);
        }

        private bool _isLiveViewEnabled;
        /// <summary>Live-view toggle; setting it starts/stops the trigger chain.</summary>
        public bool IsLiveViewEnabled
        {
            get => _isLiveViewEnabled;
            set
            {
                if (SetProperty(ref _isLiveViewEnabled, value))
                {
                    if (value)
                        StartLiveView();
                    else
                        StopLiveView();
                }
            }
        }

        private string _cameraPixelCoord = "";
        /// <summary>Cursor pixel-coordinate readout (bound from the view).</summary>
        public string CameraPixelCoord
        {
            get => _cameraPixelCoord;
            set => SetProperty(ref _cameraPixelCoord, value);
        }

        private double _exposureTime;
        /// <summary>Exposure time in microseconds (edited in the settings window).</summary>
        public double ExposureTime
        {
            get => _exposureTime;
            set => SetProperty(ref _exposureTime, value);
        }

        private double _gainValue;
        public double GainValue
        {
            get => _gainValue;
            set => SetProperty(ref _gainValue, value);
        }

        private int _imageWidth;
        public int ImageWidth
        {
            get => _imageWidth;
            set => SetProperty(ref _imageWidth, value);
        }

        private int _imageHeight;
        public int ImageHeight
        {
            get => _imageHeight;
            set => SetProperty(ref _imageHeight, value);
        }

        private string _selectedPixelFormat = "Mono8";
        public string SelectedPixelFormat
        {
            get => _selectedPixelFormat;
            set => SetProperty(ref _selectedPixelFormat, value);
        }

        public ObservableCollection<string> PixelFormatOptions { get; } = new() { "Mono8", "BGR8", "BGRA8" };

        #endregion

        #region Commands

        public DelegateCommand ConnectCameraCommand { get; }
        public DelegateCommand DisconnectCameraCommand { get; }
        public DelegateCommand StartGrabCommand { get; }
        public DelegateCommand StopGrabCommand { get; }
        public DelegateCommand ApplyExposureCommand { get; }
        public DelegateCommand ApplyGainCommand { get; }
        public DelegateCommand ApplyWidthCommand { get; }
        public DelegateCommand ApplyHeightCommand { get; }
        public DelegateCommand ApplyPixelFormatCommand { get; }
        public DelegateCommand RefreshCameraParamsCommand { get; }
        public DelegateCommand OpenCameraSettingsCommand { get; }

        #endregion

        /// <summary>
        /// Wires up commands; the camera controller is injected (and owned —
        /// it is disposed in <see cref="Dispose"/>).
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="camera"/> is null.</exception>
        public NavigationPropertyPanelViewModel(ICameraController camera)
        {
            _camera = camera ?? throw new ArgumentNullException(nameof(camera));

            ConnectCameraCommand = new DelegateCommand(ConnectCamera, () => !IsCameraConnected);
            DisconnectCameraCommand = new DelegateCommand(DisconnectCamera, () => IsCameraConnected);
            StartGrabCommand = new DelegateCommand(StartGrab, () => IsCameraConnected && !IsCameraGrabbing);
            StopGrabCommand = new DelegateCommand(StopGrab, () => IsCameraGrabbing);
            ApplyExposureCommand = new DelegateCommand(() => ApplyCameraParam(() => _camera.SetExposureTime(ExposureTime)), () => IsCameraConnected);
            ApplyGainCommand = new DelegateCommand(() => ApplyCameraParam(() => _camera.SetGain(GainValue)), () => IsCameraConnected);
            ApplyWidthCommand = new DelegateCommand(() => ApplyCameraParam(() => _camera.SetWidth(ImageWidth)), () => IsCameraConnected);
            ApplyHeightCommand = new DelegateCommand(() => ApplyCameraParam(() => _camera.SetHeight(ImageHeight)), () => IsCameraConnected);
            ApplyPixelFormatCommand = new DelegateCommand(() => ApplyCameraParam(() => _camera.SetPixelFormat(SelectedPixelFormat)), () => IsCameraConnected);
            RefreshCameraParamsCommand = new DelegateCommand(RefreshCameraParams, () => IsCameraConnected);
            OpenCameraSettingsCommand = new DelegateCommand(OpenCameraSettings, () => IsCameraConnected);
        }

        #region Camera Methods

        /// <summary>Subscribes the camera event handlers (paired with <see cref="DetachCameraEvents"/>).</summary>
        private void AttachCameraEvents()
        {
            _camera.ImageGrabbed += OnCameraImageGrabbed;
            _camera.GrabError += OnCameraGrabError;
            _camera.ConnectionLost += OnCameraConnectionLost;
        }

        /// <summary>Unsubscribes the camera event handlers.</summary>
        private void DetachCameraEvents()
        {
            _camera.ImageGrabbed -= OnCameraImageGrabbed;
            _camera.GrabError -= OnCameraGrabError;
            _camera.ConnectionLost -= OnCameraConnectionLost;
        }

        private void ConnectCamera()
        {
            try
            {
                AttachCameraEvents();
                var info = _camera.Open();
                IsCameraConnected = true;
                CameraStatusText = $"已连接: {info.ModelName} (SN: {info.SerialNumber})";
                _logger.Information("Camera connected: {ModelName}", info.ModelName);
                RefreshCameraParams();
            }
            catch (Exception ex)
            {
                // Bug fix: detach on failure so a retry does not double-subscribe handlers.
                DetachCameraEvents();
                _logger.Error(ex, "Failed to connect camera");
                CameraStatusText = $"连接失败: {ex.Message}";
                IsCameraConnected = false;
            }
        }

        private void DisconnectCamera()
        {
            try
            {
                IsLiveViewEnabled = false;
                _camera.Close();
            }
            catch (Exception ex)
            {
                _logger.Error(ex, "Failed to disconnect camera");
            }
            finally
            {
                // Always detach and reset UI state, even if Close() threw.
                DetachCameraEvents();
                IsCameraConnected = false;
                IsCameraGrabbing = false;
                CameraStatusText = "未连接";
                CameraImageSource = null;
                _logger.Information("Camera disconnected");
            }
        }

        private void StartGrab()
        {
            try
            {
                _camera.StartGrabbing();
                IsCameraGrabbing = true;
                CameraStatusText = "采集中...";
                // If live view was already ticked, start the trigger chain now.
                if (IsLiveViewEnabled)
                {
                    StartLiveView();
                }
            }
            catch (Exception ex)
            {
                _logger.Error(ex, "Failed to start grabbing");
                CameraStatusText = $"采集失败: {ex.Message}";
            }
        }

        private void StopGrab()
        {
            try
            {
                IsLiveViewEnabled = false;
                _camera.StopGrabbing();
                IsCameraGrabbing = false;
                CameraStatusText = "已停止采集";
            }
            catch (Exception ex)
            {
                _logger.Error(ex, "Failed to stop grabbing");
            }
        }

        /// <summary>Kicks off the chained-trigger loop; each frame re-triggers in the handler.</summary>
        private void StartLiveView()
        {
            if (!IsCameraGrabbing) return;
            _liveViewRunning = true;
            CameraStatusText = "实时采集中...";
            try { _camera.ExecuteSoftwareTrigger(); }
            catch (Exception ex) { _logger.Error(ex, "Live view trigger failed"); }
        }

        private void StopLiveView()
        {
            _liveViewRunning = false;
            if (IsCameraGrabbing)
                CameraStatusText = "采集中...";
        }

        /// <summary>Reads all camera parameters back into the bound properties.</summary>
        private void RefreshCameraParams()
        {
            try
            {
                ExposureTime = _camera.GetExposureTime();
                GainValue = _camera.GetGain();
                ImageWidth = _camera.GetWidth();
                ImageHeight = _camera.GetHeight();
                SelectedPixelFormat = _camera.GetPixelFormat();
                _logger.Information("Camera parameters refreshed");
            }
            catch (Exception ex)
            {
                _logger.Error(ex, "Failed to read camera parameters");
                CameraStatusText = $"读取参数失败: {ex.Message}";
            }
        }

        /// <summary>Runs one parameter-setter, reporting failures in the status line.</summary>
        private void ApplyCameraParam(Action action)
        {
            try
            {
                action();
                _logger.Information("Camera parameter applied");
            }
            catch (Exception ex)
            {
                _logger.Error(ex, "Failed to apply camera parameter");
                CameraStatusText = $"设置参数失败: {ex.Message}";
            }
        }

        /// <summary>Opens the modeless settings window with this VM as its DataContext.</summary>
        private void OpenCameraSettings()
        {
            RefreshCameraParams();
            var window = new Views.CameraSettingsWindow(this);
            window.Owner = Application.Current.MainWindow;
            window.Show();
        }

        #endregion

        #region Camera Event Handlers

        /// <summary>
        /// Converts the frame, marshals it to the UI thread, and — in live view —
        /// chains the next software trigger.
        /// </summary>
        private void OnCameraImageGrabbed(object? sender, ImageGrabbedEventArgs e)
        {
            try
            {
                var bitmap = PixelConverter.ToBitmapSource(e.PixelData, e.Width, e.Height, e.PixelFormat);

                var app = Application.Current;
                if (app == null) return;
                app.Dispatcher.Invoke(() =>
                {
                    CameraImageSource = bitmap;
                });

                if (_liveViewRunning)
                {
                    _camera.ExecuteSoftwareTrigger();
                }
            }
            catch (Exception ex)
            {
                _logger.Error(ex, "Failed to process camera image");
            }
        }

        private void OnCameraGrabError(object? sender, GrabErrorEventArgs e)
        {
            _logger.Error("Camera grab error: [{ErrorCode}] {ErrorDescription}", e.ErrorCode, e.ErrorDescription);
            var app = Application.Current;
            if (app == null) return;
            app.Dispatcher.Invoke(() =>
            {
                CameraStatusText = $"采集错误: {e.ErrorDescription}";
            });
        }

        private void OnCameraConnectionLost(object? sender, EventArgs e)
        {
            _logger.Warning("Camera connection lost");
            var app = Application.Current;
            if (app == null) return;
            app.Dispatcher.Invoke(() =>
            {
                IsCameraConnected = false;
                IsCameraGrabbing = false;
                CameraStatusText = "连接已断开";
                CameraImageSource = null;
            });
        }

        #endregion

        #region IDisposable

        /// <summary>Stops live view, detaches handlers and disposes the camera. Idempotent.</summary>
        public void Dispose()
        {
            if (_disposed) return;
            _disposed = true;
            _liveViewRunning = false;
            // Bug fix: handlers attached in ConnectCamera were never detached here,
            // so a dispose-while-connected could leave callbacks firing into a dead VM.
            DetachCameraEvents();
            try { _camera.Dispose(); }
            catch (Exception ex) { _logger.Error(ex, "Error disposing camera"); }
        }

        #endregion
    }
}
@@ -0,0 +1,57 @@
<Window x:Class="XplorePlane.Views.CameraSettingsWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        Title="相机参数设置"
        Width="320" Height="420"
        WindowStartupLocation="CenterOwner"
        ResizeMode="NoResize"
        ShowInTaskbar="False">
    <!-- Camera parameter editor. Each row is a value editor plus an apply ("设置")
         button bound to the corresponding Apply*Command on the DataContext
         (supplied by the caller; commands/properties match NavigationPropertyPanelViewModel). -->
    <StackPanel Margin="15">
        <!-- Exposure time in microseconds -->
        <TextBlock Text="曝光时间 (µs)" FontSize="11" Foreground="#666" Margin="0,0,0,2" />
        <DockPanel Margin="0,0,0,10">
            <Button DockPanel.Dock="Right" Content="设置" Width="45" Height="26" FontSize="11"
                    Margin="6,0,0,0" Command="{Binding ApplyExposureCommand}" />
            <TextBox Text="{Binding ExposureTime, UpdateSourceTrigger=PropertyChanged}"
                     Height="26" FontSize="12" VerticalContentAlignment="Center" Padding="4,0" />
        </DockPanel>

        <!-- Gain -->
        <TextBlock Text="增益" FontSize="11" Foreground="#666" Margin="0,0,0,2" />
        <DockPanel Margin="0,0,0,10">
            <Button DockPanel.Dock="Right" Content="设置" Width="45" Height="26" FontSize="11"
                    Margin="6,0,0,0" Command="{Binding ApplyGainCommand}" />
            <TextBox Text="{Binding GainValue, UpdateSourceTrigger=PropertyChanged}"
                     Height="26" FontSize="12" VerticalContentAlignment="Center" Padding="4,0" />
        </DockPanel>

        <!-- Image width in pixels -->
        <TextBlock Text="图像宽度 (px)" FontSize="11" Foreground="#666" Margin="0,0,0,2" />
        <DockPanel Margin="0,0,0,10">
            <Button DockPanel.Dock="Right" Content="设置" Width="45" Height="26" FontSize="11"
                    Margin="6,0,0,0" Command="{Binding ApplyWidthCommand}" />
            <TextBox Text="{Binding ImageWidth, UpdateSourceTrigger=PropertyChanged}"
                     Height="26" FontSize="12" VerticalContentAlignment="Center" Padding="4,0" />
        </DockPanel>

        <!-- Image height in pixels -->
        <TextBlock Text="图像高度 (px)" FontSize="11" Foreground="#666" Margin="0,0,0,2" />
        <DockPanel Margin="0,0,0,10">
            <Button DockPanel.Dock="Right" Content="设置" Width="45" Height="26" FontSize="11"
                    Margin="6,0,0,0" Command="{Binding ApplyHeightCommand}" />
            <TextBox Text="{Binding ImageHeight, UpdateSourceTrigger=PropertyChanged}"
                     Height="26" FontSize="12" VerticalContentAlignment="Center" Padding="4,0" />
        </DockPanel>

        <!-- Pixel format picker (options come from PixelFormatOptions on the VM) -->
        <TextBlock Text="像素格式" FontSize="11" Foreground="#666" Margin="0,0,0,2" />
        <DockPanel Margin="0,0,0,10">
            <Button DockPanel.Dock="Right" Content="设置" Width="45" Height="26" FontSize="11"
                    Margin="6,0,0,0" Command="{Binding ApplyPixelFormatCommand}" />
            <ComboBox SelectedItem="{Binding SelectedPixelFormat}"
                      ItemsSource="{Binding PixelFormatOptions}"
                      Height="26" FontSize="12" VerticalContentAlignment="Center" />
        </DockPanel>

        <Rectangle Height="1" Fill="#E0E0E0" Margin="0,2,0,10" />

        <!-- Reads all current values back from the camera into the fields above -->
        <Button Content="读取当前参数" Height="30" FontSize="12"
                Command="{Binding RefreshCameraParamsCommand}" />
    </StackPanel>
</Window>
@@ -0,0 +1,13 @@
using System.Windows;
namespace XplorePlane.Views
{
    /// <summary>
    /// Code-behind for the camera settings window. The view model is supplied by the
    /// caller and used directly as DataContext (typed as object to avoid a hard
    /// dependency on a specific view-model class).
    /// </summary>
    public partial class CameraSettingsWindow : Window
    {
        /// <param name="viewModel">Object assigned as the window's DataContext.</param>
        public CameraSettingsWindow(object viewModel)
        {
            InitializeComponent();
            DataContext = viewModel;
        }
    }
}

Some files were not shown because too many files have changed in this diff Show More