规范类名及命名空间名称

This commit is contained in:
李伟
2026-04-13 14:35:37 +08:00
parent c430ec229b
commit ace1c70ddf
217 changed files with 1271 additions and 1384 deletions
@@ -0,0 +1,50 @@
using System.Globalization;
using System.Resources;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Localization helper for resolving multi-language resources.
/// Follows the main ImageProcessing project's language settings by reading
/// <see cref="CultureInfo.CurrentUICulture"/> on every lookup.
/// </summary>
public static class LocalizationHelper
{
    // Lazy<T> guarantees the ResourceManager is created exactly once even
    // under concurrent first access (the original null-check lazy init was
    // racy and could construct two managers).
    private static readonly Lazy<ResourceManager> _resourceManager = new(() =>
        new ResourceManager(
            "XP.ImageProcessing.Processors.Resources.Resources",
            typeof(LocalizationHelper).Assembly));

    /// <summary>
    /// Gets the localized string for <paramref name="key"/> using the current
    /// UI culture (kept in sync with the main project's language setting).
    /// </summary>
    /// <param name="key">Resource key.</param>
    /// <returns>The localized string, or the key itself when the resource is missing.</returns>
    public static string GetString(string key)
    {
        try
        {
            var value = _resourceManager.Value.GetString(key, CultureInfo.CurrentUICulture);
            return value ?? key;
        }
        catch
        {
            // Missing resource manifest or load failure — degrade gracefully
            // to the raw key rather than crashing the UI.
            return key;
        }
    }
}
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,44 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0-windows</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RootNamespace>XP.ImageProcessing.Processors</RootNamespace>
<AssemblyName>XP.ImageProcessing.Processors</AssemblyName>
</PropertyGroup>
<ItemGroup>
<EmbeddedResource Update="Resources\Resources.resx">
<Generator>PublicResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
</EmbeddedResource>
<EmbeddedResource Update="Resources\Resources.zh-CN.resx">
<DependentUpon>Resources.resx</DependentUpon>
</EmbeddedResource>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Emgu.CV" Version="4.10.0.5680" />
<PackageReference Include="Emgu.CV.runtime.windows" Version="4.10.0.5680" />
<PackageReference Include="Emgu.CV.Bitmap" Version="4.10.0.5680" />
<PackageReference Include="Microsoft.ML.OnnxRuntime.Gpu" Version="1.17.3" />
<PackageReference Include="Serilog" Version="4.3.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.1.1" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\XP.ImageProcessing.Core\XP.ImageProcessing.Core.csproj" />
</ItemGroup>
<ItemGroup>
<Compile Update="Resources\Resources.Designer.cs">
<DesignTime>True</DesignTime>
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
</Compile>
</ItemGroup>
</Project>
@@ -0,0 +1,196 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        FilmEffectProcessor.cs
// Description: Electronic film effect processor that emulates how a
//              traditional X-ray film renders an image.
// Features:
//   - Window/level adjustment
//   - Film inversion (positive / negative)
//   - Multiple characteristic curves (linear, sigmoid, logarithmic, exponential)
//   - Edge enhancement (simulating film sharpening)
//   - Lookup-table (LUT) accelerated processing
// Algorithm:   window/level mapping + characteristic curve transform
// Author:      Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Electronic film effect processor. Emulates a traditional X-ray film's
/// rendering: window/level mapping, optional negative inversion, selectable
/// characteristic curves and edge enhancement, all accelerated by a
/// 256-entry lookup table.
/// </summary>
public class FilmEffectProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<FilmEffectProcessor>();

    // Lookup table mapping input gray level -> output gray level.
    // Rebuilt from the current parameters on every Process() call.
    private byte[] _lut = new byte[256];

    public FilmEffectProcessor()
    {
        Name = LocalizationHelper.GetString("FilmEffectProcessor_Name");
        Description = LocalizationHelper.GetString("FilmEffectProcessor_Description");
    }

    /// <summary>
    /// Registers the user-adjustable parameters: window center/width,
    /// inversion flag, characteristic curve and its strength, and the
    /// edge-enhancement amount.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("WindowCenter", new ProcessorParameter(
            "WindowCenter",
            LocalizationHelper.GetString("FilmEffectProcessor_WindowCenter"),
            typeof(int),
            128,
            0,
            255,
            LocalizationHelper.GetString("FilmEffectProcessor_WindowCenter_Desc")));
        Parameters.Add("WindowWidth", new ProcessorParameter(
            "WindowWidth",
            LocalizationHelper.GetString("FilmEffectProcessor_WindowWidth"),
            typeof(int),
            255,
            1,
            255,
            LocalizationHelper.GetString("FilmEffectProcessor_WindowWidth_Desc")));
        Parameters.Add("Invert", new ProcessorParameter(
            "Invert",
            LocalizationHelper.GetString("FilmEffectProcessor_Invert"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("FilmEffectProcessor_Invert_Desc")));
        Parameters.Add("Curve", new ProcessorParameter(
            "Curve",
            LocalizationHelper.GetString("FilmEffectProcessor_Curve"),
            typeof(string),
            "Linear",
            null,
            null,
            LocalizationHelper.GetString("FilmEffectProcessor_Curve_Desc"),
            new string[] { "Linear", "Sigmoid", "Logarithmic", "Exponential" }));
        Parameters.Add("CurveStrength", new ProcessorParameter(
            "CurveStrength",
            LocalizationHelper.GetString("FilmEffectProcessor_CurveStrength"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("FilmEffectProcessor_CurveStrength_Desc")));
        Parameters.Add("EdgeEnhance", new ProcessorParameter(
            "EdgeEnhance",
            LocalizationHelper.GetString("FilmEffectProcessor_EdgeEnhance"),
            typeof(double),
            0.0,
            0.0,
            3.0,
            LocalizationHelper.GetString("FilmEffectProcessor_EdgeEnhance_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the film effect: LUT-based window/level + curve mapping,
    /// followed by optional unsharp-mask style edge enhancement.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int windowCenter = GetParameter<int>("WindowCenter");
        int windowWidth = GetParameter<int>("WindowWidth");
        bool invert = GetParameter<bool>("Invert");
        string curve = GetParameter<string>("Curve");
        double curveStrength = GetParameter<double>("CurveStrength");
        double edgeEnhance = GetParameter<double>("EdgeEnhance");

        // Build the lookup table for the current window/curve settings.
        BuildLUT(windowCenter, windowWidth, invert, curve, curveStrength);

        // Apply the LUT pixel by pixel.
        var result = inputImage.Clone();
        int width = result.Width;
        int height = result.Height;
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                result.Data[y, x, 0] = _lut[result.Data[y, x, 0]];
            }
        }

        // Edge enhancement (simulated film sharpening): add back the
        // difference between the input and a Gaussian-blurred copy.
        // BUGFIX: the original also allocated an Image<Gray, float> "detail"
        // buffer here that was never used; it has been removed.
        if (edgeEnhance > 0.01)
        {
            using var blurred = inputImage.SmoothGaussian(3);
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    float diff = inputImage.Data[y, x, 0] - blurred.Data[y, x, 0];
                    float enhanced = result.Data[y, x, 0] + (float)(diff * edgeEnhance);
                    result.Data[y, x, 0] = (byte)Math.Clamp((int)enhanced, 0, 255);
                }
            }
        }
        _logger.Debug("Process: WC={WC}, WW={WW}, Invert={Inv}, Curve={Curve}, Strength={Str}, Edge={Edge}",
            windowCenter, windowWidth, invert, curve, curveStrength, edgeEnhance);
        return result;
    }

    /// <summary>
    /// Rebuilds the 256-entry LUT: window/level normalization to [0, 1],
    /// characteristic curve, optional inversion, then scale back to [0, 255].
    /// </summary>
    private void BuildLUT(int wc, int ww, bool invert, string curve, double strength)
    {
        double halfW = ww / 2.0;
        double low = wc - halfW;
        double high = wc + halfW;
        for (int i = 0; i < 256; i++)
        {
            // Window/level mapping into [0, 1]; a width of 1 degenerates to a
            // hard step at the window center.
            double normalized;
            if (ww <= 1)
                normalized = i >= wc ? 1.0 : 0.0;
            else
                normalized = Math.Clamp((i - low) / (high - low), 0.0, 1.0);
            // Apply the characteristic curve.
            double mapped = curve switch
            {
                "Sigmoid" => ApplySigmoid(normalized, strength),
                "Logarithmic" => ApplyLogarithmic(normalized, strength),
                "Exponential" => ApplyExponential(normalized, strength),
                _ => normalized // Linear
            };
            // Inversion (negative film effect).
            if (invert)
                mapped = 1.0 - mapped;
            _lut[i] = (byte)Math.Clamp((int)(mapped * 255.0), 0, 255);
        }
    }

    /// <summary>Sigmoid (S) curve: boosts mid-tone contrast.</summary>
    private static double ApplySigmoid(double x, double strength)
    {
        double k = strength * 10.0;
        return 1.0 / (1.0 + Math.Exp(-k * (x - 0.5)));
    }

    /// <summary>Logarithmic curve: lifts shadows, compresses highlights.</summary>
    private static double ApplyLogarithmic(double x, double strength)
    {
        double c = strength;
        return Math.Log(1.0 + c * x) / Math.Log(1.0 + c);
    }

    /// <summary>Exponential curve: compresses shadows, boosts highlights.</summary>
    private static double ApplyExponential(double x, double strength)
    {
        double c = strength;
        return (Math.Exp(c * x) - 1.0) / (Math.Exp(c) - 1.0);
    }
}
@@ -0,0 +1,149 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        PseudoColorProcessor.cs
// Description: Pseudo-color rendering processor that maps a grayscale image
//              to a color image.
// Features:
//   - Multiple built-in OpenCV color maps (Jet, Hot, Cool, Rainbow, ...)
//   - Optional gray-range cropping to highlight an intensity band of interest
//   - Optional inversion of the color-map direction
// Algorithm:   lookup-table (LUT) color mapping
// Author:      Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Pseudo-color rendering processor: maps a grayscale image to a color image
/// via an OpenCV color map, with optional gray-range cropping and map
/// inversion. The colored result is published through <c>OutputData</c>;
/// the pipeline output remains the (unchanged) grayscale image.
/// </summary>
public class PseudoColorProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<PseudoColorProcessor>();

    public PseudoColorProcessor()
    {
        Name = LocalizationHelper.GetString("PseudoColorProcessor_Name");
        Description = LocalizationHelper.GetString("PseudoColorProcessor_Description");
    }

    /// <summary>
    /// Registers the user-adjustable parameters: color map name, gray range
    /// [MinValue, MaxValue] and the map-inversion flag.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("ColorMap", new ProcessorParameter(
            "ColorMap",
            LocalizationHelper.GetString("PseudoColorProcessor_ColorMap"),
            typeof(string),
            "Jet",
            null,
            null,
            LocalizationHelper.GetString("PseudoColorProcessor_ColorMap_Desc"),
            new string[] { "Jet", "Hot", "Cool", "Rainbow", "HSV", "Turbo", "Inferno", "Magma", "Plasma", "Bone", "Ocean", "Spring", "Summer", "Autumn", "Winter" }));
        Parameters.Add("MinValue", new ProcessorParameter(
            "MinValue",
            LocalizationHelper.GetString("PseudoColorProcessor_MinValue"),
            typeof(int),
            0,
            0,
            255,
            LocalizationHelper.GetString("PseudoColorProcessor_MinValue_Desc")));
        Parameters.Add("MaxValue", new ProcessorParameter(
            "MaxValue",
            LocalizationHelper.GetString("PseudoColorProcessor_MaxValue"),
            typeof(int),
            255,
            0,
            255,
            LocalizationHelper.GetString("PseudoColorProcessor_MaxValue_Desc")));
        Parameters.Add("InvertMap", new ProcessorParameter(
            "InvertMap",
            LocalizationHelper.GetString("PseudoColorProcessor_InvertMap"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("PseudoColorProcessor_InvertMap_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Renders the pseudo-color image into <c>OutputData["PseudoColorImage"]</c>
    /// and returns a copy of the original grayscale input.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string colorMapName = GetParameter<string>("ColorMap");
        int minValue = GetParameter<int>("MinValue");
        int maxValue = GetParameter<int>("MaxValue");
        bool invertMap = GetParameter<bool>("InvertMap");
        OutputData.Clear();

        // Work on a single copy; the original cloned in both branches and
        // disposed manually (not exception-safe) — "using var" covers both.
        using var normalized = inputImage.Clone();

        // Gray-range crop: map [minValue, maxValue] onto [0, 255]. Skipped
        // when the full range is selected. Max(...,1) guards a degenerate
        // range (minValue >= maxValue).
        if (minValue > 0 || maxValue < 255)
        {
            double scale = 255.0 / Math.Max(maxValue - minValue, 1);
            for (int y = 0; y < normalized.Height; y++)
            {
                for (int x = 0; x < normalized.Width; x++)
                {
                    int val = normalized.Data[y, x, 0];
                    val = Math.Clamp(val, minValue, maxValue);
                    normalized.Data[y, x, 0] = (byte)((val - minValue) * scale);
                }
            }
        }

        // Inverting the grayscale inverts the direction of the color map.
        if (invertMap)
        {
            CvInvoke.BitwiseNot(normalized, normalized);
        }

        // Resolve the color map; unknown names fall back to Jet.
        ColorMapType cmType = colorMapName switch
        {
            "Hot" => ColorMapType.Hot,
            "Cool" => ColorMapType.Cool,
            "Rainbow" => ColorMapType.Rainbow,
            "HSV" => ColorMapType.Hsv,
            "Turbo" => ColorMapType.Turbo,
            "Inferno" => ColorMapType.Inferno,
            "Magma" => ColorMapType.Magma,
            "Plasma" => ColorMapType.Plasma,
            "Bone" => ColorMapType.Bone,
            "Ocean" => ColorMapType.Ocean,
            "Spring" => ColorMapType.Spring,
            "Summer" => ColorMapType.Summer,
            "Autumn" => ColorMapType.Autumn,
            "Winter" => ColorMapType.Winter,
            _ => ColorMapType.Jet
        };
        using var colorMat = new Mat();
        CvInvoke.ApplyColorMap(normalized.Mat, colorMat, cmType);
        var colorImage = colorMat.ToImage<Bgr, byte>();

        // Publish the colored image for the UI; ownership passes to OutputData.
        OutputData["PseudoColorImage"] = colorImage;
        _logger.Debug("Process: ColorMap={ColorMap}, MinValue={Min}, MaxValue={Max}, InvertMap={Invert}",
            colorMapName, minValue, maxValue, invertMap);

        // The pipeline output stays grayscale; color is delivered via OutputData.
        return inputImage.Clone();
    }
}
@@ -0,0 +1,80 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        GrayscaleProcessor.cs
// Description: Grayscale conversion processor.
// Features:
//   - Weighted conversion
//   - Average conversion
//   - Max-based conversion
//   - Min-based conversion
// Algorithm:   weighted formula Gray = 0.299*R + 0.587*G + 0.114*B
// Author:      Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Grayscale conversion processor. The pipeline already supplies a
/// single-channel gray image, so "Weighted" and "Average" act as
/// passthrough copies while "Max"/"Min" adjust overall intensity.
/// </summary>
public class GrayscaleProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<GrayscaleProcessor>();

    public GrayscaleProcessor()
    {
        Name = LocalizationHelper.GetString("GrayscaleProcessor_Name");
        Description = LocalizationHelper.GetString("GrayscaleProcessor_Description");
    }

    /// <summary>
    /// Registers the single "Method" parameter selecting the conversion mode.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("GrayscaleProcessor_Method"),
            typeof(string),
            "Weighted",
            null,
            null,
            LocalizationHelper.GetString("GrayscaleProcessor_Method_Desc"),
            new string[] { "Weighted", "Average", "Max", "Min" }));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Returns a new image according to the selected method. The input is
    /// never modified.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");

        // BUGFIX: the original cloned the input and then reassigned the
        // variable with the result of the scaling operator (Emgu operators
        // return a new image), leaking the intermediate clone. Each branch
        // now produces exactly one image.
        // NOTE(review): 1.2x / 0.8x scaling approximates the intent of a
        // channel-max/min conversion on an already-gray image — confirm this
        // is the intended behavior.
        var result = method switch
        {
            "Max" => inputImage * 1.2, // brighten
            "Min" => inputImage * 0.8, // darken
            _ => inputImage.Clone()    // "Weighted", "Average" and defaults: passthrough copy
        };
        _logger.Debug("Process: Method = {Method}", method);
        return result;
    }
}
@@ -0,0 +1,67 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        MirrorProcessor.cs
// Description: Mirror processor for image flipping.
// Features:
//   - Horizontal mirror (left-right flip)
//   - Vertical mirror (top-bottom flip)
//   - Both-axis mirror (horizontal + vertical, equivalent to a 180° rotation)
// Algorithm:   pixel coordinate mapping
// Author:      Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Mirror processor: flips the image horizontally, vertically, or both
/// (equivalent to a 180° rotation).
/// </summary>
public class MirrorProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MirrorProcessor>();

    public MirrorProcessor()
    {
        Name = LocalizationHelper.GetString("MirrorProcessor_Name");
        Description = LocalizationHelper.GetString("MirrorProcessor_Description");
    }

    /// <summary>
    /// Registers the single "Direction" parameter (Horizontal / Vertical / Both).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Direction", new ProcessorParameter(
            "Direction",
            LocalizationHelper.GetString("MirrorProcessor_Direction"),
            typeof(string),
            "Horizontal",
            null,
            null,
            LocalizationHelper.GetString("MirrorProcessor_Direction_Desc"),
            new string[] { "Horizontal", "Vertical", "Both" }));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Returns a flipped copy of the input; unknown directions default to
    /// horizontal.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string direction = GetParameter<string>("Direction");

        // CvInvoke.Flip fully overwrites the destination, so allocate a blank
        // image instead of paying for a pixel copy via Clone() as before.
        var result = new Image<Gray, byte>(inputImage.Size);
        FlipType flipType = direction switch
        {
            "Vertical" => FlipType.Vertical,
            "Both" => FlipType.Both,
            _ => FlipType.Horizontal
        };
        CvInvoke.Flip(inputImage, result, flipType);
        _logger.Debug("Process: Direction = {Direction}", direction);
        return result;
    }
}
@@ -0,0 +1,140 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        RotateProcessor.cs
// Description: Image rotation processor.
// Features:
//   - Arbitrary-angle rotation
//   - Keep the original size or expand the canvas to fit
//   - Configurable background fill value
//   - Nearest / bilinear / bicubic interpolation
// Algorithm:   affine-transform rotation
// Author:      Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Image rotation processor: rotates the input by an arbitrary angle about
/// its center, with optional canvas expansion, configurable background fill
/// and interpolation method.
/// </summary>
public class RotateProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<RotateProcessor>();
    public RotateProcessor()
    {
        Name = LocalizationHelper.GetString("RotateProcessor_Name");
        Description = LocalizationHelper.GetString("RotateProcessor_Description");
    }
    /// <summary>
    /// Registers the user-adjustable parameters: Angle (degrees, -360..360,
    /// default 90), ExpandCanvas flag, BackgroundValue (0..255) and the
    /// Interpolation method (Nearest / Bilinear / Bicubic).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Angle", new ProcessorParameter(
            "Angle",
            LocalizationHelper.GetString("RotateProcessor_Angle"),
            typeof(double),
            90.0,
            -360.0,
            360.0,
            LocalizationHelper.GetString("RotateProcessor_Angle_Desc")));
        Parameters.Add("ExpandCanvas", new ProcessorParameter(
            "ExpandCanvas",
            LocalizationHelper.GetString("RotateProcessor_ExpandCanvas"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("RotateProcessor_ExpandCanvas_Desc")));
        Parameters.Add("BackgroundValue", new ProcessorParameter(
            "BackgroundValue",
            LocalizationHelper.GetString("RotateProcessor_BackgroundValue"),
            typeof(int),
            0,
            0,
            255,
            LocalizationHelper.GetString("RotateProcessor_BackgroundValue_Desc")));
        Parameters.Add("Interpolation", new ProcessorParameter(
            "Interpolation",
            LocalizationHelper.GetString("RotateProcessor_Interpolation"),
            typeof(string),
            "Bilinear",
            null,
            null,
            LocalizationHelper.GetString("RotateProcessor_Interpolation_Desc"),
            new string[] { "Nearest", "Bilinear", "Bicubic" }));
        _logger.Debug("InitializeParameters");
    }
    /// <summary>
    /// Rotates the input via WarpAffine. With ExpandCanvas the output grows
    /// to contain the whole rotated image; otherwise corners rotated out of
    /// frame are lost and replaced by BackgroundValue.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double angle = GetParameter<double>("Angle");
        bool expandCanvas = GetParameter<bool>("ExpandCanvas");
        int bgValue = GetParameter<int>("BackgroundValue");
        string interpolation = GetParameter<string>("Interpolation");
        // Unknown interpolation names fall back to bilinear.
        Inter interMethod = interpolation switch
        {
            "Nearest" => Inter.Nearest,
            "Bicubic" => Inter.Cubic,
            _ => Inter.Linear
        };
        int srcW = inputImage.Width;
        int srcH = inputImage.Height;
        PointF center = new PointF(srcW / 2.0f, srcH / 2.0f);
        // Build the 2x3 affine rotation matrix about the image center.
        using var rotMat = new Mat();
        CvInvoke.GetRotationMatrix2D(center, angle, 1.0, rotMat);
        int dstW, dstH;
        if (expandCanvas)
        {
            // Compute the canvas size that can contain the whole rotated image
            // (axis-aligned bounding box of the rotated rectangle).
            double rad = Math.Abs(angle * Math.PI / 180.0);
            double sinA = Math.Abs(Math.Sin(rad));
            double cosA = Math.Abs(Math.Cos(rad));
            dstW = (int)Math.Ceiling(srcW * cosA + srcH * sinA);
            dstH = (int)Math.Ceiling(srcW * sinA + srcH * cosA);
            // Adjust the translation components (m[2], m[5]) so the rotated
            // image is centered on the enlarged canvas.
            // NOTE(review): assumes Emgu's Mat.CopyTo flattens the 2x3 CV_64F
            // matrix into double[6] in row-major order — confirm.
            double[] m = new double[6];
            rotMat.CopyTo(m);
            m[2] += (dstW - srcW) / 2.0;
            m[5] += (dstH - srcH) / 2.0;
            // Write the adjusted coefficients back into a fresh 2x3 matrix.
            using var adjusted = new Mat(2, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            System.Runtime.InteropServices.Marshal.Copy(m, 0, adjusted.DataPointer, 6);
            var result = new Image<Gray, byte>(dstW, dstH, new Gray(bgValue));
            CvInvoke.WarpAffine(inputImage, result, adjusted, new Size(dstW, dstH),
                interMethod, Warp.Default, BorderType.Constant, new MCvScalar(bgValue));
            _logger.Debug("Process: Angle={Angle}, ExpandCanvas=true, Size={W}x{H}", angle, dstW, dstH);
            return result;
        }
        else
        {
            // Keep the original size; out-of-frame pixels are filled with bgValue.
            dstW = srcW;
            dstH = srcH;
            var result = new Image<Gray, byte>(dstW, dstH, new Gray(bgValue));
            CvInvoke.WarpAffine(inputImage, result, rotMat, new Size(dstW, dstH),
                interMethod, Warp.Default, BorderType.Constant, new MCvScalar(bgValue));
            _logger.Debug("Process: Angle={Angle}, ExpandCanvas=false", angle);
            return result;
        }
    }
}
@@ -0,0 +1,106 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        ThresholdProcessor.cs
// Description: Threshold segmentation processor for image binarization.
// Features:
//   - Fixed-threshold binarization
//   - Automatic Otsu threshold computation
//   - Adjustable min/max threshold band
//   - Converts a grayscale image to a binary image
// Algorithm:   threshold segmentation, Otsu's method
// Author:      Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Threshold segmentation processor: converts a grayscale image to a binary
/// image, either with a band threshold [MinThreshold, MaxThreshold] or an
/// automatic Otsu threshold.
/// </summary>
public class ThresholdProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ThresholdProcessor>();

    public ThresholdProcessor()
    {
        Name = LocalizationHelper.GetString("ThresholdProcessor_Name");
        Description = LocalizationHelper.GetString("ThresholdProcessor_Description");
    }

    /// <summary>
    /// Registers the user-adjustable parameters: MinThreshold, MaxThreshold
    /// and the UseOtsu flag.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("MinThreshold", new ProcessorParameter(
            "MinThreshold",
            LocalizationHelper.GetString("ThresholdProcessor_MinThreshold"),
            typeof(int),
            64,
            0,
            255,
            LocalizationHelper.GetString("ThresholdProcessor_MinThreshold_Desc")));
        Parameters.Add("MaxThreshold", new ProcessorParameter(
            "MaxThreshold",
            LocalizationHelper.GetString("ThresholdProcessor_MaxThreshold"),
            typeof(int),
            192,
            0,
            255,
            LocalizationHelper.GetString("ThresholdProcessor_MaxThreshold_Desc")));
        Parameters.Add("UseOtsu", new ProcessorParameter(
            "UseOtsu",
            LocalizationHelper.GetString("ThresholdProcessor_UseOtsu"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("ThresholdProcessor_UseOtsu_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Produces the binary image: foreground pixels become 255, background 0.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int minThreshold = GetParameter<int>("MinThreshold");
        int maxThreshold = GetParameter<int>("MaxThreshold");
        bool useOtsu = GetParameter<bool>("UseOtsu");
        var result = new Image<Gray, byte>(inputImage.Size);
        if (useOtsu)
        {
            // Otsu computes the threshold itself (the passed-in value is
            // ignored). Per OpenCV, THRESH_OTSU must be combined with a base
            // threshold type; the original passed Otsu alone, which only
            // worked because Binary happens to be 0 — make it explicit.
            CvInvoke.Threshold(inputImage, result, minThreshold, 255, ThresholdType.Binary | ThresholdType.Otsu);
            _logger.Debug("Process: UseOtsu = true");
        }
        else
        {
            // Band thresholding: pixels inside [MinThreshold, MaxThreshold]
            // become foreground (255), everything else background (0).
            byte[,,] inputData = inputImage.Data;
            byte[,,] outputData = result.Data;
            int height = inputImage.Height;
            int width = inputImage.Width;
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixelValue = inputData[y, x, 0];
                    outputData[y, x, 0] = (pixelValue >= minThreshold && pixelValue <= maxThreshold)
                        ? (byte)255
                        : (byte)0;
                }
            }
            _logger.Debug("Process: MinThreshold = {MinThreshold}, MaxThreshold = {MaxThreshold}",
                minThreshold, maxThreshold);
        }
        return result;
    }
}
@@ -0,0 +1,256 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        ColorLayerProcessor.cs
// Description: Color layering processor: segments a grayscale image into
//              intensity layers.
// Features:
//   - Uniform division of the gray range into layers
//   - Configurable layer count (2..16)
//   - Otsu-based adaptive threshold selection
//   - Equal-spaced or interval-midpoint output gray values; single-layer masking
// Algorithm:   gray-level quantization / multi-threshold segmentation
// Author:      Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Color layering processor: segments a grayscale image into 2..16 intensity
/// layers, either by uniform division or recursive Otsu thresholds. Each
/// layer is rendered with a representative gray value, or a single layer can
/// be isolated as a binary mask (TargetLayer &gt; 0).
/// </summary>
public class ColorLayerProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ColorLayerProcessor>();

    public ColorLayerProcessor()
    {
        Name = LocalizationHelper.GetString("ColorLayerProcessor_Name");
        Description = LocalizationHelper.GetString("ColorLayerProcessor_Description");
    }

    /// <summary>
    /// Registers the user-adjustable parameters: Layers (2..16), threshold
    /// Method (Uniform / Otsu), OutputMode (EqualSpaced / MidValue) and
    /// TargetLayer (0 = all layers, 1..16 = binary mask of that layer).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Layers", new ProcessorParameter(
            "Layers",
            LocalizationHelper.GetString("ColorLayerProcessor_Layers"),
            typeof(int),
            4,
            2,
            16,
            LocalizationHelper.GetString("ColorLayerProcessor_Layers_Desc")));
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("ColorLayerProcessor_Method"),
            typeof(string),
            "Uniform",
            null,
            null,
            LocalizationHelper.GetString("ColorLayerProcessor_Method_Desc"),
            new string[] { "Uniform", "Otsu" }));
        Parameters.Add("OutputMode", new ProcessorParameter(
            "OutputMode",
            LocalizationHelper.GetString("ColorLayerProcessor_OutputMode"),
            typeof(string),
            "EqualSpaced",
            null,
            null,
            LocalizationHelper.GetString("ColorLayerProcessor_OutputMode_Desc"),
            new string[] { "EqualSpaced", "MidValue" }));
        Parameters.Add("TargetLayer", new ProcessorParameter(
            "TargetLayer",
            LocalizationHelper.GetString("ColorLayerProcessor_TargetLayer"),
            typeof(int),
            0,
            0,
            16,
            LocalizationHelper.GetString("ColorLayerProcessor_TargetLayer_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Computes the layer thresholds and maps every pixel to its layer's
    /// output gray value (or to a 0/255 mask when TargetLayer is set).
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int layers = GetParameter<int>("Layers");
        string method = GetParameter<string>("Method");
        string outputMode = GetParameter<string>("OutputMode");
        int targetLayer = GetParameter<int>("TargetLayer");

        // Clamp TargetLayer into range; 0 means "output all layers".
        if (targetLayer < 0 || targetLayer > layers)
            targetLayer = 0;
        _logger.Debug("Process: Layers={Layers}, Method={Method}, OutputMode={OutputMode}, TargetLayer={TargetLayer}",
            layers, method, outputMode, targetLayer);

        // Compute the layer thresholds.
        byte[] thresholds = method == "Otsu"
            ? ComputeOtsuMultiThresholds(inputImage, layers)
            : ComputeUniformThresholds(layers);

        // Compute each layer's representative output gray value.
        byte[] layerValues = ComputeLayerValues(thresholds, layers, outputMode);

        // Apply the layer mapping, row-parallel.
        int width = inputImage.Width;
        int height = inputImage.Height;
        var result = new Image<Gray, byte>(width, height);
        var srcData = inputImage.Data;
        var dstData = result.Data;
        if (targetLayer == 0)
        {
            // Output every layer with its representative gray value.
            Parallel.For(0, height, y =>
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixel = srcData[y, x, 0];
                    int layerIdx = GetLayerIndex(pixel, thresholds);
                    dstData[y, x, 0] = layerValues[layerIdx];
                }
            });
        }
        else
        {
            // Binary mask: the selected layer becomes 255 (white), all others 0.
            int target = targetLayer - 1; // parameter is 1-based, internal indices 0-based
            Parallel.For(0, height, y =>
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixel = srcData[y, x, 0];
                    int layerIdx = GetLayerIndex(pixel, thresholds);
                    dstData[y, x, 0] = (layerIdx == target) ? (byte)255 : (byte)0;
                }
            });
        }
        _logger.Debug("Process completed: {Layers} layers, target={TargetLayer}", layers, targetLayer);
        return result;
    }

    /// <summary>
    /// Uniform thresholds: divides [0, 255] into equally-sized bins and
    /// returns the layers-1 bin boundaries.
    /// </summary>
    private static byte[] ComputeUniformThresholds(int layers)
    {
        var thresholds = new byte[layers - 1];
        double step = 256.0 / layers;
        for (int i = 0; i < layers - 1; i++)
            thresholds[i] = (byte)Math.Clamp((int)((i + 1) * step), 0, 255);
        return thresholds;
    }

    /// <summary>
    /// Otsu-based multi-thresholding via recursive binary splitting of the
    /// histogram. May return FEWER than layers-1 thresholds for degenerate
    /// histograms (empty sub-ranges) — callers must not assume the count.
    /// </summary>
    private static byte[] ComputeOtsuMultiThresholds(Image<Gray, byte> image, int layers)
    {
        // Build the gray-level histogram.
        int[] histogram = new int[256];
        var data = image.Data;
        int h = image.Height, w = image.Width;
        for (int y = 0; y < h; y++)
            for (int x = 0; x < w; x++)
                histogram[data[y, x, 0]]++;
        // Recursive Otsu split, then sort ascending.
        var thresholds = new List<byte>();
        RecursiveOtsu(histogram, 0, 255, layers, thresholds);
        thresholds.Sort();
        return thresholds.ToArray();
    }

    /// <summary>
    /// Recursive Otsu: finds the between-class-variance-maximizing threshold
    /// within [low, high], then recursively splits the two halves until the
    /// requested layer count is reached.
    /// </summary>
    private static void RecursiveOtsu(int[] histogram, int low, int high, int layers, List<byte> thresholds)
    {
        if (layers <= 1 || low >= high)
            return;
        // Totals over [low, high].
        long totalPixels = 0;
        long totalSum = 0;
        for (int i = low; i <= high; i++)
        {
            totalPixels += histogram[i];
            totalSum += (long)i * histogram[i];
        }
        if (totalPixels == 0) return;
        long bgPixels = 0, bgSum = 0;
        double maxVariance = 0;
        int bestThreshold = (low + high) / 2;
        for (int t = low; t < high; t++)
        {
            bgPixels += histogram[t];
            bgSum += (long)t * histogram[t];
            long fgPixels = totalPixels - bgPixels;
            if (bgPixels == 0 || fgPixels == 0) continue;
            double bgMean = (double)bgSum / bgPixels;
            double fgMean = (double)(totalSum - bgSum) / fgPixels;
            // Un-normalized between-class variance; normalization is constant
            // within this range so the argmax is unchanged.
            double variance = (double)bgPixels * fgPixels * (bgMean - fgMean) * (bgMean - fgMean);
            if (variance > maxVariance)
            {
                maxVariance = variance;
                bestThreshold = t;
            }
        }
        thresholds.Add((byte)bestThreshold);
        // Split the remaining layer budget between the two halves.
        int leftLayers = layers / 2;
        int rightLayers = layers - leftLayers;
        RecursiveOtsu(histogram, low, bestThreshold, leftLayers, thresholds);
        RecursiveOtsu(histogram, bestThreshold + 1, high, rightLayers, thresholds);
    }

    /// <summary>
    /// Computes each layer's representative output gray value.
    /// </summary>
    private static byte[] ComputeLayerValues(byte[] thresholds, int layers, string outputMode)
    {
        var values = new byte[layers];
        if (outputMode == "EqualSpaced")
        {
            // Equally spaced: 0, 255/(n-1), 2*255/(n-1), ..., 255.
            for (int i = 0; i < layers; i++)
                values[i] = (byte)Math.Clamp((int)(255.0 * i / (layers - 1)), 0, 255);
        }
        else // MidValue
        {
            // Midpoint of each threshold interval.
            // BUGFIX: the recursive Otsu pass can legally return fewer than
            // layers-1 thresholds; the original indexed past the end of the
            // array in that case. Missing intervals reuse the previous value.
            values[0] = (byte)(thresholds.Length > 0 ? thresholds[0] / 2 : 128);
            for (int i = 1; i < layers - 1; i++)
                values[i] = i < thresholds.Length
                    ? (byte)((thresholds[i - 1] + thresholds[i]) / 2)
                    : values[i - 1];
            values[layers - 1] = (byte)(thresholds.Length > 0 ? (thresholds[^1] + 255) / 2 : 128);
        }
        return values;
    }

    /// <summary>
    /// Maps a pixel value to its layer index using the ascending threshold
    /// array: index i for pixel &lt; thresholds[i], else the last layer.
    /// </summary>
    private static int GetLayerIndex(byte pixel, byte[] thresholds)
    {
        for (int i = 0; i < thresholds.Length; i++)
        {
            if (pixel < thresholds[i])
                return i;
        }
        return thresholds.Length;
    }
}
@@ -0,0 +1,172 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        ContrastProcessor.cs
// Description: Contrast adjustment processor.
// Features:
//   - Linear contrast/brightness adjustment
//   - Automatic contrast stretching
//   - CLAHE (contrast-limited adaptive histogram equalization)
//   - Multiple selectable enhancement methods
// Algorithm:   linear transform, CLAHE
// Author:      Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// 撖寞摨西摮?
/// </summary>
public class ContrastProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<ContrastProcessor>();
public ContrastProcessor()
{
Name = LocalizationHelper.GetString("ContrastProcessor_Name");
Description = LocalizationHelper.GetString("ContrastProcessor_Description");
}
protected override void InitializeParameters()
{
Parameters.Add("Contrast", new ProcessorParameter(
"Contrast",
LocalizationHelper.GetString("ContrastProcessor_Contrast"),
typeof(double),
1.0,
0.1,
3.0,
LocalizationHelper.GetString("ContrastProcessor_Contrast_Desc")));
Parameters.Add("Brightness", new ProcessorParameter(
"Brightness",
LocalizationHelper.GetString("ContrastProcessor_Brightness"),
typeof(int),
0,
-100,
100,
LocalizationHelper.GetString("ContrastProcessor_Brightness_Desc")));
Parameters.Add("AutoContrast", new ProcessorParameter(
"AutoContrast",
LocalizationHelper.GetString("ContrastProcessor_AutoContrast"),
typeof(bool),
false,
null,
null,
LocalizationHelper.GetString("ContrastProcessor_AutoContrast_Desc")));
Parameters.Add("UseCLAHE", new ProcessorParameter(
"UseCLAHE",
LocalizationHelper.GetString("ContrastProcessor_UseCLAHE"),
typeof(bool),
false,
null,
null,
LocalizationHelper.GetString("ContrastProcessor_UseCLAHE_Desc")));
Parameters.Add("ClipLimit", new ProcessorParameter(
"ClipLimit",
LocalizationHelper.GetString("ContrastProcessor_ClipLimit"),
typeof(double),
2.0,
1.0,
10.0,
LocalizationHelper.GetString("ContrastProcessor_ClipLimit_Desc")));
_logger.Debug("InitializeParameters");
}
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
double contrast = GetParameter<double>("Contrast");
int brightness = GetParameter<int>("Brightness");
bool autoContrast = GetParameter<bool>("AutoContrast");
bool useCLAHE = GetParameter<bool>("UseCLAHE");
double clipLimit = GetParameter<double>("ClipLimit");
var result = inputImage.Clone();
if (useCLAHE)
{
result = ApplyCLAHE(inputImage, clipLimit);
}
else if (autoContrast)
{
result = AutoContrastStretch(inputImage);
}
else
{
result = inputImage * contrast + brightness;
}
_logger.Debug("Process: Contrast = {contrast},Brightness = {brightness}," +
"AutoContrast = {autoContrast},UseCLAHE = {useCLAHE}, ClipLimit = {clipLimit}", contrast, brightness, autoContrast, useCLAHE, clipLimit);
return result;
}
/// <summary>
/// Linearly stretches the gray-level range [min, max] of the input to the
/// full [0, 255] range. Returns an unmodified clone when the image already
/// spans the full 8-bit range.
/// </summary>
/// <param name="inputImage">Source 8-bit grayscale image (not modified).</param>
/// <returns>A new stretched image; the caller owns it.</returns>
private Image<Gray, byte> AutoContrastStretch(Image<Gray, byte> inputImage)
{
    double minVal = 0, maxVal = 0;
    Point minLoc = new Point();
    Point maxLoc = new Point();
    CvInvoke.MinMaxLoc(inputImage, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
    if (minVal == 0 && maxVal == 255)
    {
        // Already uses the full range: nothing to stretch.
        return inputImage.Clone();
    }
    // Work in float so the rescale does not clip before conversion back.
    var floatImage = inputImage.Convert<Gray, float>();
    try
    {
        if (maxVal > minVal)
        {
            // Map [minVal, maxVal] -> [0, 255]. Fix: the previous version
            // reassigned floatImage here and leaked the original conversion.
            using var stretched = (floatImage - minVal) * (255.0 / (maxVal - minVal));
            _logger.Debug("AutoContrastStretch");
            return stretched.Convert<Gray, byte>();
        }
        _logger.Debug("AutoContrastStretch");
        return floatImage.Convert<Gray, byte>();
    }
    finally
    {
        floatImage.Dispose();
    }
}
/// <summary>
/// Simplified tile-based histogram equalization ("CLAHE"-like).
/// Splits the image into fixed 8x8-pixel tiles and equalizes each tile
/// independently with CvInvoke.EqualizeHist; tiles are not interpolated,
/// so tile-boundary artifacts are possible.
/// NOTE(review): the clipLimit parameter is accepted but never used in this
/// implementation — confirm whether contrast limiting was intended.
/// </summary>
/// <param name="inputImage">Source image; its ROI is temporarily set and then restored.</param>
/// <param name="clipLimit">Contrast clip limit (currently unused).</param>
/// <returns>A new equalized image of the same size.</returns>
private Image<Gray, byte> ApplyCLAHE(Image<Gray, byte> inputImage, double clipLimit)
{
    int tileSize = 8; // fixed tile edge length in pixels
    int width = inputImage.Width;
    int height = inputImage.Height;
    // Tile counts round up, so border tiles may be smaller than tileSize.
    int tilesX = (width + tileSize - 1) / tileSize;
    int tilesY = (height + tileSize - 1) / tileSize;
    var result = new Image<Gray, byte>(width, height);
    for (int ty = 0; ty < tilesY; ty++)
    {
        for (int tx = 0; tx < tilesX; tx++)
        {
            int x = tx * tileSize;
            int y = ty * tileSize;
            int w = Math.Min(tileSize, width - x);
            int h = Math.Min(tileSize, height - y);
            var roi = new System.Drawing.Rectangle(x, y, w, h);
            // Extract the tile through a temporary ROI, then restore the ROI.
            inputImage.ROI = roi;
            var tile = inputImage.Copy();
            inputImage.ROI = System.Drawing.Rectangle.Empty;
            var equalizedTile = new Image<Gray, byte>(tile.Size);
            CvInvoke.EqualizeHist(tile, equalizedTile);
            // Write the equalized tile into the same region of the result.
            result.ROI = roi;
            equalizedTile.CopyTo(result);
            result.ROI = System.Drawing.Rectangle.Empty;
            tile.Dispose();
            equalizedTile.Dispose();
        }
    }
    _logger.Debug("ApplyCLAHE");
    return result;
}
}
@@ -0,0 +1,100 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: GammaProcessor.cs
// Description: Gamma correction processor for adjusting image brightness
//              and contrast.
// Features:
//   - Non-linear gamma correction
//   - Gain adjustment
//   - Lookup-table (LUT) accelerated processing
//   - Suitable for image display and brightness adjustment
// Algorithm: gamma correction formula output = (input^(1/gamma)) * gain
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Gamma correction processor.
/// Applies output = (input/255)^(1/gamma) * gain, scaled back to [0, 255],
/// using a 256-entry lookup table for speed.
/// </summary>
public class GammaProcessor : ImageProcessorBase
{
    private readonly byte[] _lookupTable;
    // Parameters the current LUT was built for; NaN forces the first build.
    private double _cachedGamma = double.NaN;
    private double _cachedGain = double.NaN;
    private static readonly ILogger _logger = Log.ForContext<GammaProcessor>();

    public GammaProcessor()
    {
        Name = LocalizationHelper.GetString("GammaProcessor_Name");
        Description = LocalizationHelper.GetString("GammaProcessor_Description");
        _lookupTable = new byte[256];
    }

    /// <summary>
    /// Registers the Gamma (0.1-5.0, default 1.0) and Gain (0.1-3.0,
    /// default 1.0) parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Gamma", new ProcessorParameter(
            "Gamma",
            LocalizationHelper.GetString("GammaProcessor_Gamma"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("GammaProcessor_Gamma_Desc")));
        Parameters.Add("Gain", new ProcessorParameter(
            "Gain",
            LocalizationHelper.GetString("GammaProcessor_Gain"),
            typeof(double),
            1.0,
            0.1,
            3.0,
            LocalizationHelper.GetString("GammaProcessor_Gain_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies gamma correction to a grayscale image; the input is not modified.
    /// </summary>
    /// <param name="inputImage">Source 8-bit grayscale image.</param>
    /// <returns>A new corrected image; the caller owns it.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double gamma = GetParameter<double>("Gamma");
        double gain = GetParameter<double>("Gain");
        // Perf fix: rebuild the LUT only when gamma or gain actually changed
        // (the previous version rebuilt it on every call).
        if (gamma != _cachedGamma || gain != _cachedGain)
        {
            BuildLookupTable(gamma, gain);
            _cachedGamma = gamma;
            _cachedGain = gain;
        }
        var result = inputImage.Clone();
        ApplyLookupTable(result);
        _logger.Debug("Process:Gamma = {0}, Gain = {1}", gamma, gain);
        return result;
    }

    /// <summary>
    /// Fills the 256-entry LUT with (i/255)^(1/gamma) * gain * 255,
    /// clamped to [0, 255].
    /// </summary>
    private void BuildLookupTable(double gamma, double gain)
    {
        double invGamma = 1.0 / gamma;
        for (int i = 0; i < 256; i++)
        {
            double normalized = i / 255.0;
            double corrected = Math.Pow(normalized, invGamma) * gain;
            int value = (int)(corrected * 255.0);
            _lookupTable[i] = (byte)Math.Max(0, Math.Min(255, value));
        }
        _logger.Debug("Gamma and gain values recorded: gamma = {Gamma}, gain = {Gain}", gamma, gain);
    }

    /// <summary>
    /// Remaps every pixel of <paramref name="image"/> in place through the LUT.
    /// </summary>
    private void ApplyLookupTable(Image<Gray, byte> image)
    {
        int width = image.Width;
        int height = image.Height;
        var data = image.Data; // hoist the Data property access out of the loop
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                data[y, x, 0] = _lookupTable[data[y, x, 0]];
            }
        }
    }
}
@@ -0,0 +1,549 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: HDREnhancementProcessor.cs
// Description: High-dynamic-range (HDR) image enhancement processor.
// Features:
//   - Local tone mapping
//   - Adaptive logarithmic mapping
//   - Drago tone mapping
//   - Bilateral-filter tone mapping
//   - Enhances shadow and highlight detail
// Algorithm: tone-mapping-based HDR enhancement
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// High-dynamic-range (HDR) image enhancement processor.
/// Selects one of four tone-mapping strategies via the "Method" parameter.
/// </summary>
public class HDREnhancementProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<HDREnhancementProcessor>();

    public HDREnhancementProcessor()
    {
        Name = LocalizationHelper.GetString("HDREnhancementProcessor_Name");
        Description = LocalizationHelper.GetString("HDREnhancementProcessor_Description");
    }

    /// <summary>
    /// Registers the tone-mapping parameters: Method, Gamma, Saturation,
    /// DetailBoost, SigmaSpace, SigmaColor and Bias. Not every method uses
    /// every parameter (see the per-method dispatch in Process).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("HDREnhancementProcessor_Method"),
            typeof(string),
            "LocalToneMap",
            null,
            null,
            LocalizationHelper.GetString("HDREnhancementProcessor_Method_Desc"),
            new string[] { "LocalToneMap", "AdaptiveLog", "Drago", "BilateralToneMap" }));
        Parameters.Add("Gamma", new ProcessorParameter(
            "Gamma",
            LocalizationHelper.GetString("HDREnhancementProcessor_Gamma"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("HDREnhancementProcessor_Gamma_Desc")));
        Parameters.Add("Saturation", new ProcessorParameter(
            "Saturation",
            LocalizationHelper.GetString("HDREnhancementProcessor_Saturation"),
            typeof(double),
            1.0,
            0.0,
            3.0,
            LocalizationHelper.GetString("HDREnhancementProcessor_Saturation_Desc")));
        Parameters.Add("DetailBoost", new ProcessorParameter(
            "DetailBoost",
            LocalizationHelper.GetString("HDREnhancementProcessor_DetailBoost"),
            typeof(double),
            1.5,
            0.0,
            5.0,
            LocalizationHelper.GetString("HDREnhancementProcessor_DetailBoost_Desc")));
        Parameters.Add("SigmaSpace", new ProcessorParameter(
            "SigmaSpace",
            LocalizationHelper.GetString("HDREnhancementProcessor_SigmaSpace"),
            typeof(double),
            20.0,
            1.0,
            100.0,
            LocalizationHelper.GetString("HDREnhancementProcessor_SigmaSpace_Desc")));
        Parameters.Add("SigmaColor", new ProcessorParameter(
            "SigmaColor",
            LocalizationHelper.GetString("HDREnhancementProcessor_SigmaColor"),
            typeof(double),
            30.0,
            1.0,
            100.0,
            LocalizationHelper.GetString("HDREnhancementProcessor_SigmaColor_Desc")));
        Parameters.Add("Bias", new ProcessorParameter(
            "Bias",
            LocalizationHelper.GetString("HDREnhancementProcessor_Bias"),
            typeof(double),
            0.85,
            0.0,
            1.0,
            LocalizationHelper.GetString("HDREnhancementProcessor_Bias_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Dispatches to the tone-mapping implementation selected by "Method";
    /// any unrecognized value falls back to LocalToneMap.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double gamma = GetParameter<double>("Gamma");
        double saturation = GetParameter<double>("Saturation");
        double detailBoost = GetParameter<double>("DetailBoost");
        double sigmaSpace = GetParameter<double>("SigmaSpace");
        double sigmaColor = GetParameter<double>("SigmaColor");
        double bias = GetParameter<double>("Bias");
        Image<Gray, byte> result;
        switch (method)
        {
            case "AdaptiveLog":
                result = AdaptiveLogarithmicMapping(inputImage, gamma, bias);
                break;
            case "Drago":
                result = DragoToneMapping(inputImage, gamma, bias);
                break;
            case "BilateralToneMap":
                result = BilateralToneMapping(inputImage, gamma, sigmaSpace, sigmaColor, detailBoost);
                break;
            default: // LocalToneMap
                result = LocalToneMapping(inputImage, gamma, sigmaSpace, detailBoost, saturation);
                break;
        }
        _logger.Debug("Process: Method={Method}, Gamma={Gamma}, Saturation={Saturation}, DetailBoost={DetailBoost}, SigmaSpace={SigmaSpace}, SigmaColor={SigmaColor}, Bias={Bias}",
            method, gamma, saturation, detailBoost, sigmaSpace, sigmaColor, bias);
        return result;
    }

    /// <summary>
    /// Local tone mapping.
    /// Decomposes the image into a base layer (illumination) and a detail
    /// layer, processes them separately, and recombines:
    ///   Base = GaussianBlur(log(I))
    ///   Detail = log(I) - Base
    ///   Output = exp(Base_compressed + Detail * boost)
    /// </summary>
    private Image<Gray, byte> LocalToneMapping(Image<Gray, byte> inputImage,
        double gamma, double sigmaSpace, double detailBoost, double saturation)
    {
        int width = inputImage.Width;
        int height = inputImage.Height;
        // Convert to float and normalize to (0, 1]; the +0.001 offset avoids log(0).
        var floatImage = inputImage.Convert<Gray, float>();
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                floatImage.Data[y, x, 0] = floatImage.Data[y, x, 0] / 255.0f + 0.001f;
        // Log domain.
        var logImage = new Image<Gray, float>(width, height);
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                logImage.Data[y, x, 0] = (float)Math.Log(floatImage.Data[y, x, 0]);
        // Base layer: large-scale Gaussian blur extracts the illumination component.
        int kernelSize = (int)(sigmaSpace * 6) | 1; // force an odd kernel size
        if (kernelSize < 3) kernelSize = 3;
        var baseLayer = new Image<Gray, float>(width, height);
        CvInvoke.GaussianBlur(logImage, baseLayer, new System.Drawing.Size(kernelSize, kernelSize), sigmaSpace);
        // Detail layer.
        var detailLayer = logImage - baseLayer;
        // Compress the dynamic range of the base layer.
        double baseMin = double.MaxValue, baseMax = double.MinValue;
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                float v = baseLayer.Data[y, x, 0];
                if (v < baseMin) baseMin = v;
                if (v > baseMax) baseMax = v;
            }
        }
        double baseRange = baseMax - baseMin;
        if (baseRange < 0.001) baseRange = 0.001;
        // Target dynamic range (log domain).
        double targetRange = Math.Log(256.0);
        // NOTE(review): compressionFactor is computed but never used below —
        // candidate for removal; the compression uses 'normalized * targetRange'.
        double compressionFactor = targetRange / baseRange;
        var compressedBase = new Image<Gray, float>(width, height);
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                float normalized = (float)((baseLayer.Data[y, x, 0] - baseMin) / baseRange);
                compressedBase.Data[y, x, 0] = (float)(normalized * targetRange + Math.Log(0.01));
            }
        }
        // Composite: compressed base layer + boosted detail layer.
        var combined = new Image<Gray, float>(width, height);
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                float val = compressedBase.Data[y, x, 0] + detailLayer.Data[y, x, 0] * (float)detailBoost;
                combined.Data[y, x, 0] = val;
            }
        }
        // Exponentiate back to the linear domain.
        var linearResult = new Image<Gray, float>(width, height);
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                linearResult.Data[y, x, 0] = (float)Math.Exp(combined.Data[y, x, 0]);
        // Gamma correction (skipped when gamma is effectively 1.0).
        if (Math.Abs(gamma - 1.0) > 0.01)
        {
            double invGamma = 1.0 / gamma;
            double maxVal = 0;
            for (int y = 0; y < height; y++)
                for (int x = 0; x < width; x++)
                    if (linearResult.Data[y, x, 0] > maxVal) maxVal = linearResult.Data[y, x, 0];
            if (maxVal > 0)
            {
                for (int y = 0; y < height; y++)
                    for (int x = 0; x < width; x++)
                    {
                        double normalized = linearResult.Data[y, x, 0] / maxVal;
                        linearResult.Data[y, x, 0] = (float)(Math.Pow(normalized, invGamma) * maxVal);
                    }
            }
        }
        // "Saturation" here acts as a contrast tweak around the mean
        // (single-channel image, so there is no chroma to saturate).
        if (Math.Abs(saturation - 1.0) > 0.01)
        {
            double mean = 0;
            for (int y = 0; y < height; y++)
                for (int x = 0; x < width; x++)
                    mean += linearResult.Data[y, x, 0];
            mean /= (width * height);
            for (int y = 0; y < height; y++)
                for (int x = 0; x < width; x++)
                {
                    double diff = linearResult.Data[y, x, 0] - mean;
                    linearResult.Data[y, x, 0] = (float)(mean + diff * saturation);
                }
        }
        // Normalize to [0, 255].
        var result = NormalizeToByteImage(linearResult);
        floatImage.Dispose();
        logImage.Dispose();
        baseLayer.Dispose();
        detailLayer.Dispose();
        compressedBase.Dispose();
        combined.Dispose();
        linearResult.Dispose();
        return result;
    }

    /// <summary>
    /// Adaptive logarithmic mapping.
    /// Adjusts the logarithmic mapping curve to the overall scene brightness:
    ///   L_out = log(1 + L_in) / logBase, followed by optional gamma.
    /// The log base is derived from the log-average luminance and the bias.
    /// </summary>
    private Image<Gray, byte> AdaptiveLogarithmicMapping(Image<Gray, byte> inputImage,
        double gamma, double bias)
    {
        int width = inputImage.Width;
        int height = inputImage.Height;
        var floatImage = inputImage.Convert<Gray, float>();
        // Normalize to [0, 1].
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                floatImage.Data[y, x, 0] /= 255.0f;
        // Global maximum luminance.
        float globalMax = 0;
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                if (floatImage.Data[y, x, 0] > globalMax)
                    globalMax = floatImage.Data[y, x, 0];
        if (globalMax < 0.001f) globalMax = 0.001f;
        // Log-average luminance (geometric mean over non-dark pixels).
        double logAvg = 0;
        int count = 0;
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                float v = floatImage.Data[y, x, 0];
                if (v > 0.001f)
                {
                    logAvg += Math.Log(v);
                    count++;
                }
            }
        }
        logAvg = Math.Exp(logAvg / Math.Max(count, 1));
        // Adaptive logarithmic mapping;
        // bias controls the balance between shadows and highlights.
        double logBase = Math.Log(2.0 + 8.0 * Math.Pow(logAvg / globalMax, Math.Log(bias) / Math.Log(0.5)));
        var result = new Image<Gray, float>(width, height);
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                float lum = floatImage.Data[y, x, 0];
                double mapped = Math.Log(1.0 + lum) / logBase;
                result.Data[y, x, 0] = (float)mapped;
            }
        }
        // Gamma correction.
        if (Math.Abs(gamma - 1.0) > 0.01)
        {
            double invGamma = 1.0 / gamma;
            for (int y = 0; y < height; y++)
                for (int x = 0; x < width; x++)
                    result.Data[y, x, 0] = (float)Math.Pow(Math.Max(0, result.Data[y, x, 0]), invGamma);
        }
        var byteResult = NormalizeToByteImage(result);
        floatImage.Dispose();
        result.Dispose();
        return byteResult;
    }

    /// <summary>
    /// Drago tone mapping.
    /// Uses an adaptive logarithmic base per pixel:
    ///   L_out = log_base(1 + L_in) / log10(1 + L_max)
    ///   base  = 2 + 8 * (L_in / L_max) ^ (ln(bias) / ln(0.5))
    /// </summary>
    private Image<Gray, byte> DragoToneMapping(Image<Gray, byte> inputImage,
        double gamma, double bias)
    {
        int width = inputImage.Width;
        int height = inputImage.Height;
        var floatImage = inputImage.Convert<Gray, float>();
        // Normalize to [0, 1].
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                floatImage.Data[y, x, 0] /= 255.0f;
        // Global maximum luminance.
        float maxLum = 0;
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                if (floatImage.Data[y, x, 0] > maxLum)
                    maxLum = floatImage.Data[y, x, 0];
        if (maxLum < 0.001f) maxLum = 0.001f;
        double biasP = Math.Log(bias) / Math.Log(0.5);
        double divider = Math.Log10(1.0 + maxLum);
        if (divider < 0.001) divider = 0.001;
        var result = new Image<Gray, float>(width, height);
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                float lum = floatImage.Data[y, x, 0];
                // Adaptive logarithmic base for this pixel.
                double adaptBase = 2.0 + 8.0 * Math.Pow(lum / maxLum, biasP);
                double logAdapt = Math.Log(1.0 + lum) / Math.Log(adaptBase);
                double mapped = logAdapt / divider;
                result.Data[y, x, 0] = (float)Math.Max(0, Math.Min(1.0, mapped));
            }
        }
        // Gamma correction.
        if (Math.Abs(gamma - 1.0) > 0.01)
        {
            double invGamma = 1.0 / gamma;
            for (int y = 0; y < height; y++)
                for (int x = 0; x < width; x++)
                    result.Data[y, x, 0] = (float)Math.Pow(result.Data[y, x, 0], invGamma);
        }
        var byteResult = NormalizeToByteImage(result);
        floatImage.Dispose();
        result.Dispose();
        return byteResult;
    }

    /// <summary>
    /// Bilateral-filter tone mapping.
    /// Separates base and detail layers with an edge-preserving bilateral
    /// filter, which keeps the detail layer sharper than a Gaussian split.
    /// </summary>
    private Image<Gray, byte> BilateralToneMapping(Image<Gray, byte> inputImage,
        double gamma, double sigmaSpace, double sigmaColor, double detailBoost)
    {
        int width = inputImage.Width;
        int height = inputImage.Height;
        // Convert to float and take the logarithm (+0.001 avoids log(0)).
        var floatImage = inputImage.Convert<Gray, float>();
        var logImage = new Image<Gray, float>(width, height);
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                logImage.Data[y, x, 0] = (float)Math.Log(floatImage.Data[y, x, 0] / 255.0f + 0.001);
        // Bilateral filter extracts the base layer (edge-preserving smoothing).
        int diameter = (int)(sigmaSpace * 2) | 1; // odd, clamped to [3, 31]
        if (diameter < 3) diameter = 3;
        if (diameter > 31) diameter = 31;
        var baseLayer = new Image<Gray, float>(width, height);
        // The filter runs on bytes here: normalize the log image to byte,
        // filter, then map the result back to the float log domain.
        var logNorm = NormalizeToByteImage(logImage);
        var baseNorm = new Image<Gray, byte>(width, height);
        CvInvoke.BilateralFilter(logNorm, baseNorm, diameter, sigmaColor, sigmaSpace);
        // Map the filtered base layer back into the float log domain.
        double logMin = double.MaxValue, logMax = double.MinValue;
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
            {
                float v = logImage.Data[y, x, 0];
                if (v < logMin) logMin = v;
                if (v > logMax) logMax = v;
            }
        double logRange = logMax - logMin;
        if (logRange < 0.001) logRange = 0.001;
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                baseLayer.Data[y, x, 0] = (float)(baseNorm.Data[y, x, 0] / 255.0 * logRange + logMin);
        // Detail layer = log image - base layer.
        var detailLayer = logImage - baseLayer;
        // Compress the base layer's dynamic range.
        double baseMin = double.MaxValue, baseMax = double.MinValue;
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
            {
                float v = baseLayer.Data[y, x, 0];
                if (v < baseMin) baseMin = v;
                if (v > baseMax) baseMax = v;
            }
        double bRange = baseMax - baseMin;
        if (bRange < 0.001) bRange = 0.001;
        double targetRange = Math.Log(256.0);
        double compression = targetRange / bRange;
        // Composite: compressed base + boosted detail.
        var combined = new Image<Gray, float>(width, height);
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
            {
                float compBase = (float)((baseLayer.Data[y, x, 0] - baseMin) * compression + Math.Log(0.01));
                combined.Data[y, x, 0] = compBase + detailLayer.Data[y, x, 0] * (float)detailBoost;
            }
        // Exponentiate back to the linear domain.
        var linearResult = new Image<Gray, float>(width, height);
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                linearResult.Data[y, x, 0] = (float)Math.Exp(combined.Data[y, x, 0]);
        // Gamma correction.
        if (Math.Abs(gamma - 1.0) > 0.01)
        {
            double invGamma = 1.0 / gamma;
            double maxVal = 0;
            for (int y = 0; y < height; y++)
                for (int x = 0; x < width; x++)
                    if (linearResult.Data[y, x, 0] > maxVal) maxVal = linearResult.Data[y, x, 0];
            if (maxVal > 0)
                for (int y = 0; y < height; y++)
                    for (int x = 0; x < width; x++)
                        linearResult.Data[y, x, 0] = (float)(Math.Pow(linearResult.Data[y, x, 0] / maxVal, invGamma) * maxVal);
        }
        var result = NormalizeToByteImage(linearResult);
        floatImage.Dispose();
        logImage.Dispose();
        logNorm.Dispose();
        baseNorm.Dispose();
        baseLayer.Dispose();
        detailLayer.Dispose();
        combined.Dispose();
        linearResult.Dispose();
        return result;
    }

    /// <summary>
    /// Min-max normalizes a float image into a byte image spanning [0, 255].
    /// A constant-valued input (range == 0) yields an all-zero output.
    /// </summary>
    private Image<Gray, byte> NormalizeToByteImage(Image<Gray, float> floatImage)
    {
        double minVal = double.MaxValue;
        double maxVal = double.MinValue;
        for (int y = 0; y < floatImage.Height; y++)
            for (int x = 0; x < floatImage.Width; x++)
            {
                float val = floatImage.Data[y, x, 0];
                if (val < minVal) minVal = val;
                if (val > maxVal) maxVal = val;
            }
        var result = new Image<Gray, byte>(floatImage.Size);
        double range = maxVal - minVal;
        if (range > 0)
        {
            for (int y = 0; y < floatImage.Height; y++)
                for (int x = 0; x < floatImage.Width; x++)
                {
                    int normalized = (int)((floatImage.Data[y, x, 0] - minVal) / range * 255.0);
                    result.Data[y, x, 0] = (byte)Math.Max(0, Math.Min(255, normalized));
                }
        }
        return result;
    }
}
@@ -0,0 +1,212 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: HierarchicalEnhancementProcessor.cs
// Description: Hierarchical (multi-scale) enhancement processor based on
//              Gaussian decomposition with per-layer detail control.
// Features:
//   - Decomposes the image into multiple detail layers + a base layer
//   - Independent gain control for each detail layer
//   - Base-layer brightness adjustment and detail clipping
// Algorithm: multi-scale Gaussian difference decomposition and reconstruction
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Hierarchical enhancement processor: multi-scale Gaussian decomposition
/// with independent gain control for each detail layer plus a base layer.
/// </summary>
public class HierarchicalEnhancementProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<HierarchicalEnhancementProcessor>();

    public HierarchicalEnhancementProcessor()
    {
        Name = LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Name");
        Description = LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Description");
    }

    /// <summary>
    /// Registers Levels (2-8), the per-band gains (Fine/Medium/Coarse),
    /// BaseGain and ClipLimit parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Levels", new ProcessorParameter(
            "Levels",
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Levels"),
            typeof(int),
            4,
            2,
            8,
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Levels_Desc")));
        Parameters.Add("FineGain", new ProcessorParameter(
            "FineGain",
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_FineGain"),
            typeof(double),
            2.0,
            0.0,
            10.0,
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_FineGain_Desc")));
        Parameters.Add("MediumGain", new ProcessorParameter(
            "MediumGain",
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_MediumGain"),
            typeof(double),
            1.5,
            0.0,
            10.0,
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_MediumGain_Desc")));
        Parameters.Add("CoarseGain", new ProcessorParameter(
            "CoarseGain",
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_CoarseGain"),
            typeof(double),
            1.0,
            0.0,
            10.0,
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_CoarseGain_Desc")));
        Parameters.Add("BaseGain", new ProcessorParameter(
            "BaseGain",
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_BaseGain"),
            typeof(double),
            1.0,
            0.0,
            3.0,
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_BaseGain_Desc")));
        Parameters.Add("ClipLimit", new ProcessorParameter(
            "ClipLimit",
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_ClipLimit"),
            typeof(double),
            0.0,
            0.0,
            50.0,
            LocalizationHelper.GetString("HierarchicalEnhancementProcessor_ClipLimit_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Decomposes the image into detail layers and a base layer via repeated
    /// Gaussian blurring, scales each layer by its gain and recombines:
    ///   output = baseGain * G_n + sum(gain_i * (G_i - G_{i+1}))
    /// A ClipLimit &gt; 0 limits each boosted detail value to [-clip, +clip].
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int levels = GetParameter<int>("Levels");
        double fineGain = GetParameter<double>("FineGain");
        double mediumGain = GetParameter<double>("MediumGain");
        double coarseGain = GetParameter<double>("CoarseGain");
        double baseGain = GetParameter<double>("BaseGain");
        double clipLimit = GetParameter<double>("ClipLimit");
        _logger.Debug("Process: Levels={Levels}, Fine={Fine}, Medium={Medium}, Coarse={Coarse}, Base={Base}, Clip={Clip}",
            levels, fineGain, mediumGain, coarseGain, baseGain, clipLimit);
        int h = inputImage.Height;
        int w = inputImage.Width;
        // === Multi-scale Gaussian decomposition on the full-resolution image ===
        // Increasing sigmas build smooth layers G0 (original), G1, ..., G_n (base).
        // Detail layers: D_i = G_i - G_{i+1}
        // Reconstruction: output = sum(D_i * gain_i) + G_n * baseGain
        // Per-level Gaussian sigma (doubles each level).
        var sigmas = new double[levels];
        for (int i = 0; i < levels; i++)
            sigmas[i] = Math.Pow(2, i + 1); // 2, 4, 8, 16, ...
        // Smooth layers stored as flat float arrays:
        // [0] = original image, [1..n] = progressively blurred versions.
        var smoothLayers = new float[levels + 1][];
        smoothLayers[0] = new float[h * w];
        var srcData = inputImage.Data;
        Parallel.For(0, h, y =>
        {
            int row = y * w;
            for (int x = 0; x < w; x++)
                smoothLayers[0][row + x] = srcData[y, x, 0];
        });
        for (int i = 0; i < levels; i++)
        {
            int ksize = ((int)(sigmas[i] * 3)) | 1; // force an odd kernel size
            if (ksize < 3) ksize = 3;
            using var src = new Image<Gray, byte>(w, h);
            // Convert the previous float layer to byte so GaussianBlur can run on it.
            var prevLayer = smoothLayers[i];
            var sd = src.Data;
            Parallel.For(0, h, y =>
            {
                int row = y * w;
                for (int x = 0; x < w; x++)
                    sd[y, x, 0] = (byte)Math.Clamp((int)Math.Round(prevLayer[row + x]), 0, 255);
            });
            using var dst = new Image<Gray, byte>(w, h);
            CvInvoke.GaussianBlur(src, dst, new System.Drawing.Size(ksize, ksize), sigmas[i]);
            smoothLayers[i + 1] = new float[h * w];
            var dd = dst.Data;
            var nextLayer = smoothLayers[i + 1];
            Parallel.For(0, h, y =>
            {
                int row = y * w;
                for (int x = 0; x < w; x++)
                    nextLayer[row + x] = dd[y, x, 0];
            });
        }
        // === Per-level gains: piecewise-linear interpolation fine -> medium -> coarse ===
        var gains = new double[levels];
        for (int i = 0; i < levels; i++)
        {
            double t = levels <= 1 ? 0.0 : (double)i / (levels - 1);
            if (t <= 0.5)
            {
                double t2 = t * 2.0;
                gains[i] = fineGain * (1.0 - t2) + mediumGain * t2;
            }
            else
            {
                double t2 = (t - 0.5) * 2.0;
                gains[i] = mediumGain * (1.0 - t2) + coarseGain * t2;
            }
        }
        // Reconstruction: output = baseGain * G_n + sum(gain_i * (G_i - G_{i+1})).
        float fBaseGain = (float)baseGain;
        float fClip = (float)clipLimit;
        var baseLayerData = smoothLayers[levels];
        var result = new Image<Gray, byte>(w, h);
        var resultData = result.Data;
        // Pre-convert gains to float for the inner loop.
        var fGains = new float[levels];
        for (int i = 0; i < levels; i++)
            fGains[i] = (float)gains[i];
        Parallel.For(0, h, y =>
        {
            int row = y * w;
            for (int x = 0; x < w; x++)
            {
                int idx = row + x;
                float val = baseLayerData[idx] * fBaseGain;
                for (int i = 0; i < levels; i++)
                {
                    float detail = smoothLayers[i][idx] - smoothLayers[i + 1][idx];
                    detail *= fGains[i];
                    if (fClip > 0)
                        detail = Math.Clamp(detail, -fClip, fClip);
                    val += detail;
                }
                resultData[y, x, 0] = (byte)Math.Clamp((int)Math.Round(val), 0, 255);
            }
        });
        _logger.Debug("Process completed: {Levels} levels, output={W}x{H}", levels, w, h);
        return result;
    }
}
@@ -0,0 +1,142 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: HistogramEqualizationProcessor.cs
// Description: Histogram equalization processor for enhancing image contrast.
// Features:
//   - Global histogram equalization
//   - Adaptive histogram equalization (CLAHE)
//   - Contrast limiting
//   - Improves overall image contrast
// Algorithm: histogram equalization, CLAHE
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Histogram equalization processor.
/// Supports global equalization and a simplified tile-based "CLAHE" mode.
/// </summary>
public class HistogramEqualizationProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<HistogramEqualizationProcessor>();

    public HistogramEqualizationProcessor()
    {
        Name = LocalizationHelper.GetString("HistogramEqualizationProcessor_Name");
        Description = LocalizationHelper.GetString("HistogramEqualizationProcessor_Description");
    }

    /// <summary>
    /// Registers the Method ("Global"/"CLAHE"), ClipLimit and TileSize parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("HistogramEqualizationProcessor_Method"),
            typeof(string),
            "Global",
            null,
            null,
            LocalizationHelper.GetString("HistogramEqualizationProcessor_Method_Desc"),
            new string[] { "Global", "CLAHE" }));
        Parameters.Add("ClipLimit", new ProcessorParameter(
            "ClipLimit",
            LocalizationHelper.GetString("HistogramEqualizationProcessor_ClipLimit"),
            typeof(double),
            2.0,
            1.0,
            10.0,
            LocalizationHelper.GetString("HistogramEqualizationProcessor_ClipLimit_Desc")));
        Parameters.Add("TileSize", new ProcessorParameter(
            "TileSize",
            LocalizationHelper.GetString("HistogramEqualizationProcessor_TileSize"),
            typeof(int),
            8,
            4,
            32,
            LocalizationHelper.GetString("HistogramEqualizationProcessor_TileSize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Runs global histogram equalization, or the tile-based variant when
    /// Method == "CLAHE". The input is not modified (its ROI is temporarily
    /// set and restored on the CLAHE path).
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double clipLimit = GetParameter<double>("ClipLimit");
        int tileSize = GetParameter<int>("TileSize");
        Image<Gray, byte> result;
        if (method == "CLAHE")
        {
            result = ApplyCLAHE(inputImage, clipLimit, tileSize);
        }
        else // Global
        {
            result = new Image<Gray, byte>(inputImage.Size);
            CvInvoke.EqualizeHist(inputImage, result);
        }
        _logger.Debug("Process: Method = {Method}, ClipLimit = {ClipLimit}, TileSize = {TileSize}",
            method, clipLimit, tileSize);
        return result;
    }

    /// <summary>
    /// Simplified tile-based adaptive equalization: each tile is equalized
    /// independently, then blended with the original tile by the factor
    /// min(clipLimit / 10, 1) as a stand-in for true contrast limiting.
    /// Tiles are not interpolated, so tile-boundary artifacts are possible.
    /// </summary>
    private Image<Gray, byte> ApplyCLAHE(Image<Gray, byte> inputImage, double clipLimit, int tileSize)
    {
        int width = inputImage.Width;
        int height = inputImage.Height;
        // Tile counts round up, so border tiles may be smaller than tileSize.
        int tilesX = (width + tileSize - 1) / tileSize;
        int tilesY = (height + tileSize - 1) / tileSize;
        var result = new Image<Gray, byte>(width, height);
        // Equalize each tile independently.
        for (int ty = 0; ty < tilesY; ty++)
        {
            for (int tx = 0; tx < tilesX; tx++)
            {
                int x = tx * tileSize;
                int y = ty * tileSize;
                int w = Math.Min(tileSize, width - x);
                int h = Math.Min(tileSize, height - y);
                var roi = new System.Drawing.Rectangle(x, y, w, h);
                inputImage.ROI = roi;
                var tile = inputImage.Copy();
                inputImage.ROI = System.Drawing.Rectangle.Empty;
                // Apply histogram equalization to the tile.
                var equalizedTile = new Image<Gray, byte>(tile.Size);
                CvInvoke.EqualizeHist(tile, equalizedTile);
                // Contrast limiting (simplified): blend original and equalized.
                var floatTile = tile.Convert<Gray, float>();
                var floatEqualized = equalizedTile.Convert<Gray, float>();
                var diff = floatEqualized - floatTile;
                var limited = floatTile + diff * Math.Min(clipLimit / 10.0, 1.0);
                var limitedByte = limited.Convert<Gray, byte>();
                // Copy the processed tile into the result image.
                result.ROI = roi;
                limitedByte.CopyTo(result);
                result.ROI = System.Drawing.Rectangle.Empty;
                tile.Dispose();
                equalizedTile.Dispose();
                floatTile.Dispose();
                floatEqualized.Dispose();
                diff.Dispose();
                limited.Dispose();
                limitedByte.Dispose();
            }
        }
        return result;
    }
}
@@ -0,0 +1,267 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: HistogramOverlayProcessor.cs
// Description: Histogram overlay processor — computes the gray-level
//              histogram and renders it as a chart on top of the image.
// Features:
//   - Computes the gray-level histogram and summary statistics
//   - Draws the histogram chart with axes over the image
//   - Outputs a histogram data table
// Algorithm: histogram computation + overlay rendering
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using System.Text;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Histogram overlay processor: computes the gray-level histogram and
/// statistics, and draws the histogram chart on top of the image.
/// </summary>
public class HistogramOverlayProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<HistogramOverlayProcessor>();
//
private const int ChartWidth = 256; // 梁𠶖曉躹摰賢漲
private const int ChartHeight = 200; // 梁𠶖曉躹擃睃漲
private const int AxisMarginLeft = 50; // Y頧湔蝑暸坔捐摨?
private const int AxisMarginBottom = 25; // X頧湔蝑暸摨?
private const int Padding = 8; // 峕艶憸嘥器頝?
private const int PaddingRight = 25; // 喃儒憸嘥器頝嘅摰寧熙X頧湔錰撠曉摨行摮梹
private const int Margin = 10; // 頝嘥㦛誩椰銝𡃏颲寡
private const float BgAlpha = 0.6f;
private const double FontScale = 0.35;
private const int FontThickness = 1;
/// <summary>
/// Initializes the processor name and description from localized resources.
/// </summary>
public HistogramOverlayProcessor()
{
    Name = LocalizationHelper.GetString("HistogramOverlayProcessor_Name");
    Description = LocalizationHelper.GetString("HistogramOverlayProcessor_Description");
}
/// <summary>
/// This processor exposes no configurable parameters.
/// </summary>
protected override void InitializeParameters()
{
    // No parameters.
}
/// <summary>
/// Computes the 256-bin grayscale histogram of the input, publishes a plain-text
/// statistics report and the raw histogram via OutputData, and renders a
/// semi-transparent histogram chart (bars, axes, ticks, labels) onto a BGR copy
/// of the input exposed as OutputData["PseudoColorImage"].
/// The returned image is an unmodified clone of the input.
/// </summary>
/// <param name="inputImage">8-bit grayscale source image.</param>
/// <returns>A clone of <paramref name="inputImage"/>; the chart goes to OutputData only.</returns>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
    int h = inputImage.Height;
    int w = inputImage.Width;
    var srcData = inputImage.Data;
    // === 1. Build the grayscale histogram ===
    var hist = new int[256];
    for (int y = 0; y < h; y++)
        for (int x = 0; x < w; x++)
            hist[srcData[y, x, 0]]++;
    int maxCount = 0;
    long totalPixels = (long)h * w;
    for (int i = 0; i < 256; i++)
        if (hist[i] > maxCount) maxCount = hist[i];
    // === 2. Compute summary statistics (min/max/mean/median/mode/std-dev) ===
    double mean = 0, variance = 0;
    int minVal = 255, maxVal = 0;
    int modeVal = 0, modeCount = 0;
    long medianTarget = totalPixels / 2, cumulative = 0;
    int medianVal = 0;
    bool medianFound = false;
    for (int i = 0; i < 256; i++)
    {
        if (hist[i] > 0)
        {
            if (i < minVal) minVal = i;
            if (i > maxVal) maxVal = i;
        }
        if (hist[i] > modeCount) { modeCount = hist[i]; modeVal = i; }
        mean += (double)i * hist[i];
        cumulative += hist[i];
        // Median = first gray level whose cumulative count reaches half the pixel total.
        if (!medianFound && cumulative >= medianTarget) { medianVal = i; medianFound = true; }
    }
    mean /= totalPixels;
    for (int i = 0; i < 256; i++)
        variance += hist[i] * (i - mean) * (i - mean);
    variance /= totalPixels;
    double stdDev = Math.Sqrt(variance);
    // === 3. Build the plain-text report ===
    // NOTE(review): the string literals below are runtime output and are kept
    // byte-for-byte; they appear mojibake-encoded in this source — the encoding
    // of the .cs file should be fixed at the repository level, not here.
    var sb = new StringBuilder();
    sb.AppendLine("=== 啣漲湔䲮霈?===");
    sb.AppendLine($"撠箏站: {w} x {h}");
    sb.AppendLine($"蝝䭾㺭: {totalPixels}");
    sb.AppendLine($"撠讐摨? {minVal}");
    sb.AppendLine($"憭抒摨? {maxVal}");
    sb.AppendLine($"撟喳啣漲: {mean:F2}");
    sb.AppendLine($"銝凋啣漲: {medianVal}");
    sb.AppendLine($"隡埈㺭啣漲: {modeVal} (箇緵 {modeCount} 甈?");
    sb.AppendLine($"撌? {stdDev:F2}");
    sb.AppendLine();
    sb.AppendLine("啣漲墦t豹t(%)");
    for (int i = 0; i < 256; i++)
    {
        if (hist[i] > 0)
            sb.AppendLine($"{i}\t{hist[i]}\t{(double)hist[i] / totalPixels * 100.0:F4}");
    }
    OutputData["HistogramTable"] = sb.ToString();
    OutputData["Histogram"] = hist;
    // === 4. Render the chart onto a BGR copy of the input ===
    var colorImage = inputImage.Convert<Bgr, byte>();
    var colorData = colorImage.Data;
    // Chart layout, in pixels:
    //   horizontal: Padding + Y-axis label margin + plot area + PaddingRight
    //   vertical:   Padding + plot area + X-axis label margin + Padding
    int totalW = Padding + AxisMarginLeft + ChartWidth + PaddingRight;
    int totalH = Padding + ChartHeight + AxisMarginBottom + Padding;
    // Clamp the chart background so it fits inside the image (offset by Margin).
    int bgW = Math.Min(totalW, w - Margin);
    int bgH = Math.Min(totalH, h - Margin);
    if (bgW > Padding + AxisMarginLeft && bgH > Padding + AxisMarginBottom)
    {
        int plotW = Math.Min(ChartWidth, bgW - Padding - AxisMarginLeft - PaddingRight);
        int plotH = Math.Min(ChartHeight, bgH - Padding - AxisMarginBottom - Padding);
        if (plotW <= 0 || plotH <= 0) goto SkipOverlay;
        // Top-left corner of the plot area in image coordinates.
        int plotX0 = Margin + Padding + AxisMarginLeft;
        int plotY0 = Margin + Padding;
        // Pre-compute a bar height per plot column, scaled so maxCount maps to plotH-1.
        double binWidth = (double)plotW / 256.0;
        var barHeights = new int[plotW];
        for (int px = 0; px < plotW; px++)
        {
            int bin = Math.Min((int)(px / binWidth), 255);
            barHeights[px] = maxCount > 0 ? (int)((long)hist[bin] * (plotH - 1) / maxCount) : 0;
        }
        float alpha = BgAlpha;
        float inv = 1.0f - alpha;
        // Darken the chart background region (blend toward black), one row per task.
        Parallel.For(0, bgH, dy =>
        {
            int imgY = Margin + dy;
            if (imgY >= h) return;
            for (int dx = 0; dx < bgW; dx++)
            {
                int imgX = Margin + dx;
                if (imgX >= w) break;
                colorData[imgY, imgX, 0] = (byte)(int)(colorData[imgY, imgX, 0] * inv);
                colorData[imgY, imgX, 1] = (byte)(int)(colorData[imgY, imgX, 1] * inv);
                colorData[imgY, imgX, 2] = (byte)(int)(colorData[imgY, imgX, 2] * inv);
            }
        });
        // Draw the histogram bars (blue-tinted additive blend), anchored at the X axis.
        Parallel.For(0, plotH, dy =>
        {
            int imgY = plotY0 + dy;
            if (imgY >= h) return;
            int rowFromBottom = plotH - 1 - dy;
            for (int dx = 0; dx < plotW; dx++)
            {
                int imgX = plotX0 + dx;
                if (imgX >= w) break;
                if (rowFromBottom < barHeights[dx])
                {
                    byte curB = colorData[imgY, imgX, 0];
                    byte curG = colorData[imgY, imgX, 1];
                    byte curR = colorData[imgY, imgX, 2];
                    colorData[imgY, imgX, 0] = (byte)Math.Clamp(curB + (int)(255 * alpha), 0, 255);
                    colorData[imgY, imgX, 1] = (byte)Math.Clamp(curG + (int)(50 * alpha), 0, 255);
                    colorData[imgY, imgX, 2] = (byte)Math.Clamp(curR + (int)(50 * alpha), 0, 255);
                }
            }
        });
        // === 5. Draw axes, tick marks, grid lines and labels ===
        var white = new MCvScalar(255, 255, 255);
        var gray = new MCvScalar(180, 180, 180);
        // Y axis
        CvInvoke.Line(colorImage,
            new Point(plotX0, plotY0),
            new Point(plotX0, plotY0 + plotH),
            white, 1);
        // X axis
        CvInvoke.Line(colorImage,
            new Point(plotX0, plotY0 + plotH),
            new Point(plotX0 + plotW, plotY0 + plotH),
            white, 1);
        // X-axis ticks at gray levels 0, 64, 128, 192, 255
        int[] xTicks = { 0, 64, 128, 192, 255 };
        foreach (int tick in xTicks)
        {
            int tx = plotX0 + (int)(tick * binWidth);
            if (tx >= w) break;
            CvInvoke.Line(colorImage,
                new Point(tx, plotY0 + plotH),
                new Point(tx, plotY0 + plotH + 4),
                white, 1);
            string label = tick.ToString();
            CvInvoke.PutText(colorImage, label,
                new Point(tx - 8, plotY0 + plotH + 18),
                FontFace.HersheySimplex, FontScale, white, FontThickness);
        }
        // Y-axis ticks at 0%, 25%, 50%, 75%, 100% of the tallest bin
        for (int i = 0; i <= 4; i++)
        {
            int val = maxCount * i / 4;
            int ty = plotY0 + plotH - (int)((long)plotH * i / 4);
            CvInvoke.Line(colorImage,
                new Point(plotX0 - 4, ty),
                new Point(plotX0, ty),
                white, 1);
            // Dashed horizontal grid lines for the interior ticks only.
            if (i > 0 && i < 4)
            {
                for (int gx = plotX0 + 2; gx < plotX0 + plotW; gx += 6)
                {
                    int gxEnd = Math.Min(gx + 2, plotX0 + plotW);
                    CvInvoke.Line(colorImage,
                        new Point(gx, ty),
                        new Point(gxEnd, ty),
                        gray, 1);
                }
            }
            string label = FormatCount(val);
            CvInvoke.PutText(colorImage, label,
                new Point(Margin + Padding, ty + 4),
                FontFace.HersheySimplex, FontScale, white, FontThickness);
        }
    }
    SkipOverlay:
    OutputData["PseudoColorImage"] = colorImage;
    _logger.Debug("Process completed: histogram overlay, mean={Mean:F2}, stdDev={Std:F2}", mean, stdDev);
    return inputImage.Clone();
}
/// <summary>
/// Renders a pixel count as a compact axis label, e.g. 12345 -> "12.3K",
/// 2500000 -> "2.5M"; values below 1000 are returned unchanged.
/// </summary>
private static string FormatCount(int count)
{
    return count switch
    {
        >= 1_000_000 => $"{count / 1_000_000.0:F1}M",
        >= 1_000 => $"{count / 1_000.0:F1}K",
        _ => count.ToString()
    };
}
}
@@ -0,0 +1,320 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: RetinexProcessor.cs
// Description: Retinex-based multi-scale shadow-correction processor
// Features:
//   - Single-scale Retinex (SSR)
//   - Multi-scale Retinex (MSR)
//   - Multi-scale Retinex with color restoration (MSRCR)
//   - Non-uniform illumination correction
//   - Shadow removal
// Algorithm: Retinex theory - decomposes an image into reflectance and illumination components
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Retinex-based multi-scale shadow/illumination correction processor.
/// Decomposes the image into reflectance and illumination components and
/// keeps the (gain/offset-adjusted) reflectance. Supports SSR, MSR and MSRCR.
/// </summary>
public class RetinexProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<RetinexProcessor>();

    public RetinexProcessor()
    {
        Name = LocalizationHelper.GetString("RetinexProcessor_Name");
        Description = LocalizationHelper.GetString("RetinexProcessor_Description");
    }

    /// <summary>
    /// Registers the user-facing parameters: method (SSR/MSR/MSRCR), the three
    /// Gaussian scales, and the output gain/offset.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("RetinexProcessor_Method"),
            typeof(string),
            "MSR",
            null,
            null,
            LocalizationHelper.GetString("RetinexProcessor_Method_Desc"),
            new string[] { "SSR", "MSR", "MSRCR" }));
        Parameters.Add("Sigma1", new ProcessorParameter(
            "Sigma1",
            LocalizationHelper.GetString("RetinexProcessor_Sigma1"),
            typeof(double),
            15.0,
            1.0,
            100.0,
            LocalizationHelper.GetString("RetinexProcessor_Sigma1_Desc")));
        Parameters.Add("Sigma2", new ProcessorParameter(
            "Sigma2",
            LocalizationHelper.GetString("RetinexProcessor_Sigma2"),
            typeof(double),
            80.0,
            1.0,
            200.0,
            LocalizationHelper.GetString("RetinexProcessor_Sigma2_Desc")));
        Parameters.Add("Sigma3", new ProcessorParameter(
            "Sigma3",
            LocalizationHelper.GetString("RetinexProcessor_Sigma3"),
            typeof(double),
            250.0,
            1.0,
            500.0,
            LocalizationHelper.GetString("RetinexProcessor_Sigma3_Desc")));
        Parameters.Add("Gain", new ProcessorParameter(
            "Gain",
            LocalizationHelper.GetString("RetinexProcessor_Gain"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("RetinexProcessor_Gain_Desc")));
        Parameters.Add("Offset", new ProcessorParameter(
            "Offset",
            LocalizationHelper.GetString("RetinexProcessor_Offset"),
            typeof(int),
            0,
            -100,
            100,
            LocalizationHelper.GetString("RetinexProcessor_Offset_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Dispatches to the selected Retinex variant. SSR uses the middle scale
    /// (Sigma2); MSR and MSRCR use all three scales.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double sigma1 = GetParameter<double>("Sigma1");
        double sigma2 = GetParameter<double>("Sigma2");
        double sigma3 = GetParameter<double>("Sigma3");
        double gain = GetParameter<double>("Gain");
        int offset = GetParameter<int>("Offset");
        Image<Gray, byte> result;
        if (method == "SSR")
        {
            result = SingleScaleRetinex(inputImage, sigma2, gain, offset);
        }
        else if (method == "MSR")
        {
            result = MultiScaleRetinex(inputImage, new[] { sigma1, sigma2, sigma3 }, gain, offset);
        }
        else // MSRCR
        {
            result = MultiScaleRetinexCR(inputImage, new[] { sigma1, sigma2, sigma3 }, gain, offset);
        }
        _logger.Debug("Process: Method = {Method}, Sigma1 = {Sigma1}, Sigma2 = {Sigma2}, Sigma3 = {Sigma3}, Gain = {Gain}, Offset = {Offset}",
            method, sigma1, sigma2, sigma3, gain, offset);
        return result;
    }

    /// <summary>
    /// Single-scale Retinex (SSR): R(x,y) = log(I(x,y)) - log(I(x,y) * G(x,y)).
    /// FIX: intermediate Emgu images are now disposed deterministically; the
    /// original leaked the converted float image and the operator temporaries.
    /// </summary>
    private Image<Gray, byte> SingleScaleRetinex(Image<Gray, byte> inputImage, double sigma, double gain, int offset)
    {
        // Convert to float and add a small constant so log(0) cannot occur.
        using Image<Gray, float> srcFloat = inputImage.Convert<Gray, float>();
        using Image<Gray, float> floatImage = srcFloat + 1.0f;
        using Image<Gray, float> logImage = ComputeLog(floatImage);
        // Gaussian blur estimates the illumination component.
        using Image<Gray, float> blurred = new Image<Gray, float>(inputImage.Size);
        int kernelSize = (int)(sigma * 6) | 1; // force an odd kernel size
        if (kernelSize < 3) kernelSize = 3;
        CvInvoke.GaussianBlur(floatImage, blurred, new System.Drawing.Size(kernelSize, kernelSize), sigma);
        using Image<Gray, float> logBlurred = ComputeLog(blurred);
        // R = log(I) - log(I*G), then apply gain and offset.
        using Image<Gray, float> diff = logImage - logBlurred;
        using Image<Gray, float> gained = diff * gain;
        using Image<Gray, float> retinex = gained + offset;
        // Stretch to the full 0-255 range.
        return NormalizeToByteImage(retinex);
    }

    /// <summary>
    /// Multi-scale Retinex (MSR): the average of SSR responses over all scales.
    /// FIX: the accumulator reassignment no longer leaks the previous image,
    /// and each per-scale temporary is disposed.
    /// </summary>
    private Image<Gray, byte> MultiScaleRetinex(Image<Gray, byte> inputImage, double[] sigmas, double gain, int offset)
    {
        using Image<Gray, float> srcFloat = inputImage.Convert<Gray, float>();
        using Image<Gray, float> floatImage = srcFloat + 1.0f; // avoid log(0)
        using Image<Gray, float> logImage = ComputeLog(floatImage);
        // Accumulate the SSR response of every scale.
        Image<Gray, float> msrResult = new Image<Gray, float>(inputImage.Size);
        msrResult.SetZero();
        try
        {
            foreach (double sigma in sigmas)
            {
                using Image<Gray, float> blurred = new Image<Gray, float>(inputImage.Size);
                int kernelSize = (int)(sigma * 6) | 1; // force an odd kernel size
                if (kernelSize < 3) kernelSize = 3;
                CvInvoke.GaussianBlur(floatImage, blurred, new System.Drawing.Size(kernelSize, kernelSize), sigma);
                using Image<Gray, float> logBlurred = ComputeLog(blurred);
                using Image<Gray, float> ssr = logImage - logBlurred;
                // Replace the accumulator, disposing the previous one (old code leaked it).
                Image<Gray, float> sum = msrResult + ssr;
                msrResult.Dispose();
                msrResult = sum;
            }
            // Average over the scales, then apply gain and offset.
            using Image<Gray, float> averaged = msrResult / sigmas.Length;
            using Image<Gray, float> gained = averaged * gain;
            using Image<Gray, float> shifted = gained + offset;
            return NormalizeToByteImage(shifted);
        }
        finally
        {
            msrResult.Dispose();
        }
    }

    /// <summary>
    /// Multi-scale Retinex with color restoration (MSRCR). For grayscale input
    /// the restoration step reduces to a per-pixel log-based contrast weight.
    /// </summary>
    private Image<Gray, byte> MultiScaleRetinexCR(Image<Gray, byte> inputImage, double[] sigmas, double gain, int offset)
    {
        // Run MSR first, then weight it by a simplified restoration factor.
        using Image<Gray, byte> msrResult = MultiScaleRetinex(inputImage, sigmas, gain, offset);
        using Image<Gray, float> floatMsr = msrResult.Convert<Gray, float>();
        using Image<Gray, float> floatInput = inputImage.Convert<Gray, float>();
        using Image<Gray, float> enhanced = new Image<Gray, float>(inputImage.Size);
        for (int y = 0; y < inputImage.Height; y++)
        {
            for (int x = 0; x < inputImage.Width; x++)
            {
                float msr = floatMsr.Data[y, x, 0];
                float original = floatInput.Data[y, x, 0];
                // Restoration factor: log-scaled original intensity, normalized by log(128).
                float c = (float)Math.Log(original + 1.0) / (float)Math.Log(128.0);
                enhanced.Data[y, x, 0] = msr * c;
            }
        }
        return NormalizeToByteImage(enhanced);
    }

    /// <summary>
    /// Per-pixel natural logarithm of a float image. All pixel values must be
    /// positive (callers add 1 before calling).
    /// </summary>
    private static Image<Gray, float> ComputeLog(Image<Gray, float> source)
    {
        var logImage = new Image<Gray, float>(source.Size);
        var src = source.Data;
        var dst = logImage.Data;
        for (int y = 0; y < source.Height; y++)
        {
            for (int x = 0; x < source.Width; x++)
            {
                dst[y, x, 0] = (float)Math.Log(src[y, x, 0]);
            }
        }
        return logImage;
    }

    /// <summary>
    /// Linearly stretches a float image to the full 0-255 byte range.
    /// A constant image (range == 0) maps to all zeros.
    /// </summary>
    private Image<Gray, byte> NormalizeToByteImage(Image<Gray, float> floatImage)
    {
        // Find the global minimum and maximum.
        double minVal = double.MaxValue;
        double maxVal = double.MinValue;
        for (int y = 0; y < floatImage.Height; y++)
        {
            for (int x = 0; x < floatImage.Width; x++)
            {
                float val = floatImage.Data[y, x, 0];
                if (val < minVal) minVal = val;
                if (val > maxVal) maxVal = val;
            }
        }
        // Map [min, max] -> [0, 255].
        Image<Gray, byte> result = new Image<Gray, byte>(floatImage.Size);
        double range = maxVal - minVal;
        if (range > 0)
        {
            for (int y = 0; y < floatImage.Height; y++)
            {
                for (int x = 0; x < floatImage.Width; x++)
                {
                    float val = floatImage.Data[y, x, 0];
                    int normalized = (int)((val - minVal) / range * 255.0);
                    result.Data[y, x, 0] = (byte)Math.Max(0, Math.Min(255, normalized));
                }
            }
        }
        return result;
    }
}
@@ -0,0 +1,141 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: SharpenProcessor.cs
// Description: Sharpening processor for enhancing image edges and fine detail
// Features:
//   - Laplacian sharpening
//   - Unsharp masking
//   - Adjustable sharpening strength
//   - Multiple kernel sizes
// Algorithm: Laplacian operator, unsharp masking
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Sharpening processor: Laplacian sharpening or unsharp masking with an
/// adjustable strength and (for unsharp masking) kernel size.
/// </summary>
public class SharpenProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SharpenProcessor>();

    public SharpenProcessor()
    {
        Name = LocalizationHelper.GetString("SharpenProcessor_Name");
        Description = LocalizationHelper.GetString("SharpenProcessor_Description");
    }

    /// <summary>
    /// Registers the user-facing parameters: method, strength and kernel size.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("SharpenProcessor_Method"),
            typeof(string),
            "Laplacian",
            null,
            null,
            LocalizationHelper.GetString("SharpenProcessor_Method_Desc"),
            new string[] { "Laplacian", "UnsharpMask" }));
        Parameters.Add("Strength", new ProcessorParameter(
            "Strength",
            LocalizationHelper.GetString("SharpenProcessor_Strength"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("SharpenProcessor_Strength_Desc")));
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("SharpenProcessor_KernelSize"),
            typeof(int),
            3,
            1,
            15,
            LocalizationHelper.GetString("SharpenProcessor_KernelSize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the selected sharpening method and returns the sharpened image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double strength = GetParameter<double>("Strength");
        int kernelSize = GetParameter<int>("KernelSize");
        // Gaussian kernels must have an odd size.
        if (kernelSize % 2 == 0) kernelSize++;
        Image<Gray, byte> result;
        if (method == "UnsharpMask")
        {
            result = ApplyUnsharpMask(inputImage, kernelSize, strength);
        }
        else // Laplacian
        {
            result = ApplyLaplacianSharpening(inputImage, strength);
        }
        _logger.Debug("Process: Method = {Method}, Strength = {Strength}, KernelSize = {KernelSize}",
            method, strength, kernelSize);
        return result;
    }

    /// <summary>
    /// Laplacian sharpening: result = I + strength * Laplacian(I).
    /// FIX: removed the dead laplacianByte conversion (computed but never used)
    /// and made all intermediate images deterministically disposed.
    /// </summary>
    private Image<Gray, byte> ApplyLaplacianSharpening(Image<Gray, byte> inputImage, double strength)
    {
        // 32-bit float Laplacian keeps negative responses.
        using var laplacian = new Image<Gray, float>(inputImage.Size);
        CvInvoke.Laplacian(inputImage, laplacian, DepthType.Cv32F, 1);
        using var floatImage = inputImage.Convert<Gray, float>();
        using var scaledLaplacian = laplacian * strength;
        using var sharpened = floatImage + scaledLaplacian;
        // Convert<Gray, byte> saturates to [0, 255].
        return sharpened.Convert<Gray, byte>();
    }

    /// <summary>
    /// Unsharp masking: result = I + strength * (I - GaussianBlur(I)).
    /// </summary>
    private Image<Gray, byte> ApplyUnsharpMask(Image<Gray, byte> inputImage, int kernelSize, double strength)
    {
        // Low-pass version of the input.
        using var blurred = new Image<Gray, byte>(inputImage.Size);
        CvInvoke.GaussianBlur(inputImage, blurred,
            new System.Drawing.Size(kernelSize, kernelSize), 0);
        // Detail = input - blurred, amplified and added back.
        using var floatInput = inputImage.Convert<Gray, float>();
        using var floatBlurred = blurred.Convert<Gray, float>();
        using var detail = floatInput - floatBlurred;
        using var scaledDetail = detail * strength;
        using var sharpened = floatInput + scaledDetail;
        return sharpened.Convert<Gray, byte>();
    }
}
@@ -0,0 +1,127 @@
// ============================================================================
// Copyright © 2016-2025 Hexagon Technology Center GmbH. All Rights Reserved.
// File: SubPixelZoomProcessor.cs
// Description: Sub-pixel zoom processor - high-quality interpolated upscaling
// Features:
//   - Arbitrary zoom factors (including fractional, e.g. 1.5x, 2.3x)
//   - Multiple interpolation methods (nearest, bilinear, bicubic, Lanczos)
//   - Optional sharpening compensation (counteracts interpolation blur)
//   - Optional explicit output size
// Algorithm: high-quality interpolated upscaling based on OpenCV Resize
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Sub-pixel zoom processor: upscales the input by an arbitrary factor using a
/// selectable interpolation method, with optional unsharp-mask compensation
/// for the blur introduced by interpolation.
/// </summary>
public class SubPixelZoomProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SubPixelZoomProcessor>();

    public SubPixelZoomProcessor()
    {
        Name = LocalizationHelper.GetString("SubPixelZoomProcessor_Name");
        Description = LocalizationHelper.GetString("SubPixelZoomProcessor_Description");
    }

    /// <summary>
    /// Registers the user-facing parameters: scale factor, interpolation method,
    /// and the optional post-resize sharpening with its strength.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("ScaleFactor", new ProcessorParameter(
            "ScaleFactor",
            LocalizationHelper.GetString("SubPixelZoomProcessor_ScaleFactor"),
            typeof(double), 2.0, 1.0, 16.0,
            LocalizationHelper.GetString("SubPixelZoomProcessor_ScaleFactor_Desc")));
        Parameters.Add("Interpolation", new ProcessorParameter(
            "Interpolation",
            LocalizationHelper.GetString("SubPixelZoomProcessor_Interpolation"),
            typeof(string), "Lanczos", null, null,
            LocalizationHelper.GetString("SubPixelZoomProcessor_Interpolation_Desc"),
            new string[] { "Nearest", "Bilinear", "Bicubic", "Lanczos" }));
        Parameters.Add("SharpenAfter", new ProcessorParameter(
            "SharpenAfter",
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenAfter"),
            typeof(bool), false, null, null,
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenAfter_Desc")));
        Parameters.Add("SharpenStrength", new ProcessorParameter(
            "SharpenStrength",
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenStrength"),
            typeof(double), 0.5, 0.1, 3.0,
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenStrength_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Resizes the input by the configured scale factor and, when enabled,
    /// applies unsharp-mask compensation to the upscaled result.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double scaleFactor = GetParameter<double>("ScaleFactor");
        string interpolation = GetParameter<string>("Interpolation");
        bool sharpenAfter = GetParameter<bool>("SharpenAfter");
        double sharpenStrength = GetParameter<double>("SharpenStrength");

        Inter interMethod;
        switch (interpolation)
        {
            case "Nearest": interMethod = Inter.Nearest; break;
            case "Bilinear": interMethod = Inter.Linear; break;
            case "Bicubic": interMethod = Inter.Cubic; break;
            default: interMethod = Inter.Lanczos4; break;
        }

        // Target size, rounded and clamped so neither dimension drops below 1.
        int newWidth = Math.Max(1, (int)Math.Round(inputImage.Width * scaleFactor));
        int newHeight = Math.Max(1, (int)Math.Round(inputImage.Height * scaleFactor));

        var result = new Image<Gray, byte>(newWidth, newHeight);
        CvInvoke.Resize(inputImage, result, new Size(newWidth, newHeight), 0, 0, interMethod);

        if (sharpenAfter)
        {
            ApplySharpenCompensation(result, scaleFactor, sharpenStrength);
        }

        _logger.Debug("Process: Scale={Scale}, Interp={Interp}, Size={W}x{H}, Sharpen={Sharpen}",
            scaleFactor, interpolation, newWidth, newHeight, sharpenAfter);
        return result;
    }

    /// <summary>
    /// In-place unsharp masking: image = image + strength * (image - blur),
    /// with the Gaussian kernel size derived from the scale factor.
    /// </summary>
    private static void ApplySharpenCompensation(Image<Gray, byte> image, double scaleFactor, double strength)
    {
        int ksize = Math.Max(3, (int)(scaleFactor * 2) | 1); // must be odd
        using var blurred = image.SmoothGaussian(ksize);
        var data = image.Data;
        var blurData = blurred.Data;
        for (int row = 0; row < image.Height; row++)
        {
            for (int col = 0; col < image.Width; col++)
            {
                float original = data[row, col, 0];
                float smooth = blurData[row, col, 0];
                float boosted = original + (float)(strength * (original - smooth));
                data[row, col, 0] = (byte)Math.Clamp((int)boosted, 0, 255);
            }
        }
    }
}
@@ -0,0 +1,319 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: SuperResolutionProcessor.cs
// Description: Deep-learning super-resolution processor
// Features:
//   - EDSR and FSRCNN super-resolution models (ONNX format)
//   - 2x / 3x / 4x upscaling
//   - Grayscale input converted automatically to the model's channel layout
//   - Model/session caching to avoid repeated loading
//   - Inference via Microsoft.ML.OnnxRuntime
// Algorithm: EDSR (Enhanced Deep Residual SR) / FSRCNN (Fast SR CNN)
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Deep-learning super-resolution processor (EDSR / FSRCNN) using ONNX Runtime.
/// </summary>
public class SuperResolutionProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SuperResolutionProcessor>();
    // Cached inference session, reused across calls to avoid reloading the model.
    // NOTE(review): static mutable cache without locking — confirm Process is never
    // invoked concurrently from multiple threads.
    private static InferenceSession? _cachedSession;
    private static string _cachedModelKey = string.Empty;

    public SuperResolutionProcessor()
    {
        Name = LocalizationHelper.GetString("SuperResolutionProcessor_Name");
        Description = LocalizationHelper.GetString("SuperResolutionProcessor_Description");
    }

    /// <summary>
    /// Registers the user-facing parameters: model (EDSR/FSRCNN) and scale (2/3/4).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Model", new ProcessorParameter(
            "Model",
            LocalizationHelper.GetString("SuperResolutionProcessor_Model"),
            typeof(string),
            "FSRCNN",
            null,
            null,
            LocalizationHelper.GetString("SuperResolutionProcessor_Model_Desc"),
            new string[] { "EDSR", "FSRCNN" }));
        Parameters.Add("Scale", new ProcessorParameter(
            "Scale",
            LocalizationHelper.GetString("SuperResolutionProcessor_Scale"),
            typeof(string),
            "2",
            null,
            null,
            LocalizationHelper.GetString("SuperResolutionProcessor_Scale_Desc"),
            new string[] { "2", "3", "4" }));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Runs super-resolution: locates the ONNX model file, (re)uses a cached
    /// inference session, then dispatches to tiled processing for large EDSR
    /// inputs or to single-pass processing otherwise.
    /// </summary>
    /// <exception cref="FileNotFoundException">Thrown when the model file cannot be located.</exception>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string model = GetParameter<string>("Model");
        int scale = int.Parse(GetParameter<string>("Scale"));
        // Locate the model file on disk.
        string modelPath = FindModelFile(model, scale);
        if (string.IsNullOrEmpty(modelPath))
        {
            _logger.Error("Model file not found: {Model}_x{Scale}.onnx", model, scale);
            // NOTE: the message below is runtime output and is kept byte-for-byte
            // (it appears mojibake-encoded in this source); it lists the searched
            // locations and a tf2onnx conversion hint.
            throw new FileNotFoundException(
                $"頞颲函璅∪芣𪄳? {model}_x{scale}.onnx\n" +
                $"霂瑕璅∪曄蔭唬誑銝衤遙銝:\n" +
                $" 1. 蝔见/Models/\n" +
                $" 2. 蝔见/\n" +
                $"璅∪閬?ONNX n" +
                $"臭蝙?tf2onnx 隞?.pb 頧祆揢:\n" +
                $" pip install tf2onnx\n" +
                $" python -m tf2onnx.convert --input {model}_x{scale}.pb --output {model}_x{scale}.onnx --inputs input:0 --outputs output:0");
        }
        // Reuse the cached session when the model+scale combination is unchanged.
        string modelKey = $"{model}_{scale}";
        InferenceSession session;
        if (_cachedModelKey == modelKey && _cachedSession != null)
        {
            session = _cachedSession;
            _logger.Debug("Reusing cached session: {ModelKey}", modelKey);
        }
        else
        {
            _cachedSession?.Dispose();
            var options = new SessionOptions();
            options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_ALL;
            try
            {
                // Prefer the CUDA execution provider; fall back to CPU if unavailable.
                options.AppendExecutionProvider_CUDA(0);
                _logger.Information("Using CUDA GPU for inference");
            }
            catch
            {
                _logger.Warning("CUDA not available, falling back to CPU");
            }
            session = new InferenceSession(modelPath, options);
            _cachedSession = session;
            _cachedModelKey = modelKey;
            // NOTE(review): string.Join over session.GetType().Name logs only the
            // type name, not the actual execution providers, and 'providers' below
            // is never used — the logging intent looks unfinished; confirm.
            var providers = session.ModelMetadata?.CustomMetadataMap;
            _logger.Information("Loaded ONNX model: {ModelPath}, Providers: {Providers}",
                modelPath, string.Join(", ", session.GetType().Name));
        }
        int h = inputImage.Height;
        int w = inputImage.Width;
        _logger.Information("Input image size: {W}x{H}, Model: {Model}, Scale: {Scale}", w, h, model, scale);
        // Large EDSR inputs are processed tile-by-tile to bound memory use / avoid OOM.
        const int TileSize = 256;
        bool useTiling = (model.StartsWith("EDSR", StringComparison.OrdinalIgnoreCase)) && (h > TileSize || w > TileSize);
        if (useTiling)
        {
            return ProcessTiled(session, inputImage, scale, TileSize);
        }
        return ProcessSingle(session, inputImage, scale);
    }

    /// <summary>
    /// Runs the model on the whole image in a single inference call (FSRCNN path;
    /// also used per-tile by ProcessTiled).
    /// </summary>
    private Image<Gray, byte> ProcessSingle(InferenceSession session, Image<Gray, byte> inputImage, int scale)
    {
        int h = inputImage.Height;
        int w = inputImage.Width;
        // Inspect the model's input metadata to decide the channel count.
        string inputName = session.InputMetadata.Keys.First();
        var inputMeta = session.InputMetadata[inputName];
        int[] dims = inputMeta.Dimensions;
        // dims are assumed [1, H, W, C] (NHWC) with C = 1 (FSRCNN) or 3 (EDSR)
        // — TODO confirm against the exported models.
        int inputChannels = dims[^1]; // last dimension = channel count
        // Build the input tensor [1, H, W, C] (NHWC), filled row-parallel.
        DenseTensor<float> inputTensor;
        if (inputChannels == 1)
        {
            // FSRCNN: single-channel grayscale input.
            inputTensor = new DenseTensor<float>(new[] { 1, h, w, 1 });
            // NOTE(review): Buffer.ToArray() copies the (still empty) backing buffer,
            // the copy is filled, then wrapped in a NEW tensor below — the first
            // tensor allocation is redundant (harmless but wasteful); confirm intent.
            float[] buf = inputTensor.Buffer.ToArray();
            var imgData = inputImage.Data;
            Parallel.For(0, h, y =>
            {
                int rowOffset = y * w;
                for (int x = 0; x < w; x++)
                    buf[rowOffset + x] = imgData[y, x, 0];
            });
            inputTensor = new DenseTensor<float>(buf, new[] { 1, h, w, 1 });
        }
        else
        {
            // EDSR: three-channel BGR input (the gray value is replicated by CvtColor).
            using var colorInput = new Image<Bgr, byte>(w, h);
            CvInvoke.CvtColor(inputImage, colorInput, ColorConversion.Gray2Bgr);
            var buf = new float[h * w * 3];
            var imgData = colorInput.Data;
            Parallel.For(0, h, y =>
            {
                int rowOffset = y * w * 3;
                for (int x = 0; x < w; x++)
                {
                    int px = rowOffset + x * 3;
                    buf[px] = imgData[y, x, 0];
                    buf[px + 1] = imgData[y, x, 1];
                    buf[px + 2] = imgData[y, x, 2];
                }
            });
            inputTensor = new DenseTensor<float>(buf, new[] { 1, h, w, 3 });
        }
        // Run inference.
        var inputs = new List<NamedOnnxValue>
        {
            NamedOnnxValue.CreateFromTensor(inputName, inputTensor)
        };
        using var results = session.Run(inputs);
        var outputTensor = results.First().AsTensor<float>();
        // Output is interpreted as [1, C, H*scale, W*scale] (NCHW) — note this
        // differs from the NHWC input layout above; TODO confirm per exported model.
        var shape = outputTensor.Dimensions;
        int outC = shape[1];
        int outH = shape[2];
        int outW = shape[3];
        // Convert the output tensor back to a grayscale image.
        Image<Gray, byte> result;
        if (outC == 1)
        {
            // FSRCNN: single-channel output [1, 1, outH, outW]; clamp to byte range.
            result = new Image<Gray, byte>(outW, outH);
            var outData = result.Data;
            Parallel.For(0, outH, y =>
            {
                for (int x = 0; x < outW; x++)
                    outData[y, x, 0] = (byte)Math.Clamp((int)outputTensor[0, 0, y, x], 0, 255);
            });
        }
        else
        {
            // EDSR: three-channel output [1, 3, outH, outW], channel order taken as BGR;
            // reduced to grayscale per pixel.
            result = new Image<Gray, byte>(outW, outH);
            var outData = result.Data;
            Parallel.For(0, outH, y =>
            {
                for (int x = 0; x < outW; x++)
                {
                    float b = outputTensor[0, 0, y, x];
                    float g = outputTensor[0, 1, y, x];
                    float r = outputTensor[0, 2, y, x];
                    // BT.601 luma: 0.299*R + 0.587*G + 0.114*B
                    int gray = (int)(0.299f * r + 0.587f * g + 0.114f * b);
                    outData[y, x, 0] = (byte)Math.Clamp(gray, 0, 255);
                }
            });
        }
        _logger.Debug("ProcessSingle: Scale={Scale}, Output={W}x{H}", scale, outW, outH);
        return result;
    }

    /// <summary>
    /// Tiled processing (EDSR): splits the image into overlapping tiles, upscales
    /// each tile independently via ProcessSingle, and stitches the results into
    /// the full-size output. Later tiles overwrite the overlap region.
    /// </summary>
    private Image<Gray, byte> ProcessTiled(InferenceSession session, Image<Gray, byte> inputImage, int scale, int tileSize)
    {
        int h = inputImage.Height;
        int w = inputImage.Width;
        int overlap = 8; // tile overlap in pixels, hides seams at tile borders
        var result = new Image<Gray, byte>(w * scale, h * scale);
        int tilesX = (int)Math.Ceiling((double)w / (tileSize - overlap));
        int tilesY = (int)Math.Ceiling((double)h / (tileSize - overlap));
        _logger.Information("Tiled processing: {TilesX}x{TilesY} tiles, tileSize={TileSize}", tilesX, tilesY, tileSize);
        for (int ty = 0; ty < tilesY; ty++)
        {
            for (int tx = 0; tx < tilesX; tx++)
            {
                // Clamp the tile origin so the tile stays fully inside the image.
                int srcX = Math.Min(tx * (tileSize - overlap), w - tileSize);
                int srcY = Math.Min(ty * (tileSize - overlap), h - tileSize);
                srcX = Math.Max(srcX, 0);
                srcY = Math.Max(srcY, 0);
                int tw = Math.Min(tileSize, w - srcX);
                int th = Math.Min(tileSize, h - srcY);
                // Extract the tile through a temporary ROI, then reset the ROI.
                inputImage.ROI = new System.Drawing.Rectangle(srcX, srcY, tw, th);
                var tile = inputImage.Copy();
                inputImage.ROI = System.Drawing.Rectangle.Empty;
                // Upscale the tile.
                var srTile = ProcessSingle(session, tile, scale);
                tile.Dispose();
                // Paste the upscaled tile into the output at the scaled position.
                int dstX = srcX * scale;
                int dstY = srcY * scale;
                result.ROI = new System.Drawing.Rectangle(dstX, dstY, srTile.Width, srTile.Height);
                srTile.CopyTo(result);
                result.ROI = System.Drawing.Rectangle.Empty;
                srTile.Dispose();
            }
        }
        _logger.Debug("ProcessTiled: Scale={Scale}, Output={W}x{H}", scale, result.Width, result.Height);
        return result;
    }

    /// <summary>
    /// Searches well-known locations for "{model}_x{scale}.onnx" and returns the
    /// first existing path, or string.Empty when none is found.
    /// Probe order: app base dir Models/, app base dir, CWD Models/, CWD.
    /// </summary>
    private static string FindModelFile(string model, int scale)
    {
        string baseDir = AppDomain.CurrentDomain.BaseDirectory;
        string fileName = $"{model}_x{scale}.onnx";
        string[] searchPaths = new[]
        {
            Path.Combine(baseDir, "Models", fileName),
            Path.Combine(baseDir, fileName),
            Path.Combine(Directory.GetCurrentDirectory(), "Models", fileName),
            Path.Combine(Directory.GetCurrentDirectory(), fileName),
        };
        foreach (var path in searchPaths)
        {
            if (File.Exists(path))
            {
                _logger.Debug("Found model file: {Path}", path);
                return path;
            }
        }
        _logger.Warning("Model file not found: {Model}_x{Scale}.onnx", model, scale);
        return string.Empty;
    }
}
@@ -0,0 +1,102 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: MorphologyProcessor.cs
// Description: Morphological processor for binary-image morphology operations
// Features:
//   - Erode: shrinks foreground regions
//   - Dilate: expands foreground regions
//   - Open: erode then dilate, removes small objects
//   - Close: dilate then erode, fills small holes
// Algorithm: mathematical morphology
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Morphological processor: erode, dilate, open and close with a rectangular
/// structuring element of configurable size and iteration count.
/// </summary>
public class MorphologyProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MorphologyProcessor>();

    public MorphologyProcessor()
    {
        Name = LocalizationHelper.GetString("MorphologyProcessor_Name");
        Description = LocalizationHelper.GetString("MorphologyProcessor_Description");
    }

    /// <summary>
    /// Registers the user-facing parameters: operation, kernel size, iterations.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Operation", new ProcessorParameter(
            "Operation",
            LocalizationHelper.GetString("MorphologyProcessor_Operation"),
            typeof(string),
            "Erode",
            null,
            null,
            LocalizationHelper.GetString("MorphologyProcessor_Operation_Desc"),
            new string[] { "Erode", "Dilate", "Open", "Close" }));
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("MorphologyProcessor_KernelSize"),
            typeof(int),
            3,
            1,
            21,
            LocalizationHelper.GetString("MorphologyProcessor_KernelSize_Desc")));
        Parameters.Add("Iterations", new ProcessorParameter(
            "Iterations",
            LocalizationHelper.GetString("MorphologyProcessor_Iterations"),
            typeof(int),
            1,
            1,
            10,
            LocalizationHelper.GetString("MorphologyProcessor_Iterations_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the selected morphological operation to the input image.
    /// An unrecognized operation name returns an unmodified clone.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string operation = GetParameter<string>("Operation");
        int kernelSize = GetParameter<int>("KernelSize");
        int iterations = GetParameter<int>("Iterations");
        // FIX: GetStructuringElement returns a Mat (IDisposable) that was
        // previously leaked on every call; dispose it deterministically.
        using var kernel = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(kernelSize, kernelSize), new Point(-1, -1));
        var result = inputImage.Clone();
        switch (operation)
        {
            case "Erode":
                CvInvoke.Erode(inputImage, result, kernel, new Point(-1, -1),
                    iterations, BorderType.Default, default);
                break;
            case "Dilate":
                CvInvoke.Dilate(inputImage, result, kernel, new Point(-1, -1),
                    iterations, BorderType.Default, default);
                break;
            case "Open":
                CvInvoke.MorphologyEx(inputImage, result, MorphOp.Open, kernel,
                    new Point(-1, -1), iterations, BorderType.Default, default);
                break;
            case "Close":
                CvInvoke.MorphologyEx(inputImage, result, MorphOp.Close, kernel,
                    new Point(-1, -1), iterations, BorderType.Default, default);
                break;
        }
        _logger.Debug("Process:Operation = {operation},KernelSize = {kernelSize},Iterations = {iterations}", operation, kernelSize, iterations);
        return result;
    }
}
@@ -0,0 +1,128 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: DifferenceProcessor.cs
// Description: Difference (finite-difference) processor for edge and change detection
// Features:
//   - Pixel-wise difference of the image
//   - Horizontal, vertical and diagonal (gradient-magnitude) differences
//   - Usable for edge detection
//   - Optional output normalization
// Algorithm: pixel-level finite differences
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// 差分è¿ç®—ç®—å­
/// </summary>
public class DifferenceProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<DifferenceProcessor>();
/// <summary>
/// Initializes the processor's localized display name and description.
/// </summary>
public DifferenceProcessor()
{
    Name = LocalizationHelper.GetString("DifferenceProcessor_Name");
    Description = LocalizationHelper.GetString("DifferenceProcessor_Description");
}
protected override void InitializeParameters()
{
Parameters.Add("Direction", new ProcessorParameter(
"Direction",
LocalizationHelper.GetString("DifferenceProcessor_Direction"),
typeof(string),
"Horizontal",
null,
null,
LocalizationHelper.GetString("DifferenceProcessor_Direction_Desc"),
new string[] { "Horizontal", "Vertical", "Both" }));
Parameters.Add("Normalize", new ProcessorParameter(
"Normalize",
LocalizationHelper.GetString("DifferenceProcessor_Normalize"),
typeof(bool),
true,
null,
null,
LocalizationHelper.GetString("DifferenceProcessor_Normalize_Desc")));
_logger.Debug("InitializeParameters");
}
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
string direction = GetParameter<string>("Direction");
bool normalize = GetParameter<bool>("Normalize");
int width = inputImage.Width;
int height = inputImage.Height;
var floatImage = inputImage.Convert<Gray, float>();
var result = new Image<Gray, float>(width, height);
if (direction == "Horizontal")
{
// 水平差分: I(x+1,y) - I(x,y)
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width - 1; x++)
{
result.Data[y, x, 0] = floatImage.Data[y, x + 1, 0] - floatImage.Data[y, x, 0];
}
result.Data[y, width - 1, 0] = 0;
}
}
else if (direction == "Vertical")
{
// 垂直差分: I(x,y+1) - I(x,y)
for (int y = 0; y < height - 1; y++)
{
for (int x = 0; x < width; x++)
{
result.Data[y, x, 0] = floatImage.Data[y + 1, x, 0] - floatImage.Data[y, x, 0];
}
}
for (int x = 0; x < width; x++)
{
result.Data[height - 1, x, 0] = 0;
}
}
else // Both
{
// 梯度幅� sqrt((dx)^2 + (dy)^2)
for (int y = 0; y < height - 1; y++)
{
for (int x = 0; x < width - 1; x++)
{
float dx = floatImage.Data[y, x + 1, 0] - floatImage.Data[y, x, 0];
float dy = floatImage.Data[y + 1, x, 0] - floatImage.Data[y, x, 0];
result.Data[y, x, 0] = (float)Math.Sqrt(dx * dx + dy * dy);
}
}
}
if (normalize)
{
double minVal = 0, maxVal = 0;
Point minLoc = new Point();
Point maxLoc = new Point();
CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
if (maxVal > minVal)
{
result = (result - minVal) * (255.0 / (maxVal - minVal));
}
}
floatImage.Dispose();
_logger.Debug("Process: Direction = {Direction}, Normalize = {Normalize}", direction, normalize);
return result.Convert<Gray, byte>();
}
}
@@ -0,0 +1,90 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: DivisionProcessor.cs
// Description: Division operator, used for image normalization.
// Features:
//   - Divides every pixel value by a constant divisor
//   - Supports a scale-factor adjustment
//   - Optional normalization to the 0-255 range
//   - Commonly used for background correction and image normalization
// Algorithm: pixel-level division
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Division operator: divides every pixel by a constant, with an optional
/// scale factor and optional min-max normalization to 0-255.
/// </summary>
public class DivisionProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<DivisionProcessor>();

    /// <summary>
    /// Initializes the processor with its localized name and description.
    /// </summary>
    public DivisionProcessor()
    {
        Name = LocalizationHelper.GetString("DivisionProcessor_Name");
        Description = LocalizationHelper.GetString("DivisionProcessor_Description");
    }

    /// <summary>
    /// Registers the user-configurable parameters: divisor, scale factor,
    /// and the normalization flag.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Divisor", new ProcessorParameter(
            "Divisor",
            LocalizationHelper.GetString("DivisionProcessor_Divisor"),
            typeof(double),
            2.0,
            0.01,
            255.0,
            LocalizationHelper.GetString("DivisionProcessor_Divisor_Desc")));
        Parameters.Add("Scale", new ProcessorParameter(
            "Scale",
            LocalizationHelper.GetString("DivisionProcessor_Scale"),
            typeof(double),
            1.0,
            0.1,
            10.0,
            LocalizationHelper.GetString("DivisionProcessor_Scale_Desc")));
        Parameters.Add("Normalize", new ProcessorParameter(
            "Normalize",
            LocalizationHelper.GetString("DivisionProcessor_Normalize"),
            typeof(bool),
            true,
            null,
            null,
            LocalizationHelper.GetString("DivisionProcessor_Normalize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Computes (pixel / Divisor) * Scale in float precision, optionally
    /// stretches the result to 0-255, and converts back to 8-bit.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image.</param>
    /// <returns>8-bit grayscale result image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double divisor = GetParameter<double>("Divisor");
        double scale = GetParameter<double>("Scale");
        bool normalize = GetParameter<bool>("Normalize");
        // FIX: floatImage and the division temporary were previously never
        // disposed; Emgu images wrap native memory, so release them explicitly.
        using var floatImage = inputImage.Convert<Gray, float>();
        using var divided = floatImage / divisor;
        var result = divided * scale;
        try
        {
            if (normalize)
            {
                double minVal = 0, maxVal = 0;
                Point minLoc = new Point();
                Point maxLoc = new Point();
                CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
                if (maxVal > minVal)
                {
                    using var shifted = result - minVal;
                    var stretched = shifted * (255.0 / (maxVal - minVal));
                    result.Dispose();
                    result = stretched;
                }
            }
            // FIX: use named Serilog properties for structured logging,
            // consistent with the sibling processors.
            _logger.Debug("Process: Divisor = {Divisor}, Scale = {Scale}, Normalize = {Normalize}", divisor, scale, normalize);
            return result.Convert<Gray, byte>();
        }
        finally
        {
            result.Dispose();
        }
    }
}
@@ -0,0 +1,95 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: IntegralProcessor.cs
// Description: Integral operator, computes the integral (summed-area) image.
// Features:
//   - Computes the integral image (cumulative sums)
//   - Enables fast region-sum queries
//   - Optional normalized output
// Algorithm: integral image (summed-area table)
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Integral operator: builds the summed-area table of the input image and
/// (optionally) min-max normalizes it to 0-255 for display.
/// </summary>
public class IntegralProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<IntegralProcessor>();

    /// <summary>
    /// Initializes the processor with its localized name and description.
    /// </summary>
    public IntegralProcessor()
    {
        Name = LocalizationHelper.GetString("IntegralProcessor_Name");
        Description = LocalizationHelper.GetString("IntegralProcessor_Description");
    }

    /// <summary>
    /// Registers the single user-configurable parameter: the normalization flag.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Normalize", new ProcessorParameter(
            "Normalize",
            LocalizationHelper.GetString("IntegralProcessor_Normalize"),
            typeof(bool),
            true,
            null,
            null,
            LocalizationHelper.GetString("IntegralProcessor_Normalize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Computes the integral image using the standard recurrence
    /// I(x,y) = src(x,y) + I(x-1,y) + I(x,y-1) - I(x-1,y-1).
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image.</param>
    /// <returns>
    /// 8-bit rendering of the integral image; min-max stretched when
    /// Normalize is set, otherwise saturated during the byte conversion.
    /// </returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        bool normalize = GetParameter<bool>("Normalize");
        int width = inputImage.Width;
        int height = inputImage.Height;
        // Accumulate in double to avoid overflow (sums can reach 255 * width * height).
        using var integralImage = new Image<Gray, double>(width, height);
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                double sum = inputImage.Data[y, x, 0];
                if (x > 0)
                    sum += integralImage.Data[y, x - 1, 0];
                if (y > 0)
                    sum += integralImage.Data[y - 1, x, 0];
                if (x > 0 && y > 0)
                    sum -= integralImage.Data[y - 1, x - 1, 0];
                integralImage.Data[y, x, 0] = sum;
            }
        }
        var result = integralImage.Convert<Gray, float>();
        try
        {
            if (normalize)
            {
                double minVal = 0, maxVal = 0;
                Point minLoc = new Point();
                Point maxLoc = new Point();
                CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
                if (maxVal > minVal)
                {
                    // FIX: dispose the intermediates produced by the Emgu
                    // operators; the previous code leaked the old "result"
                    // image on reassignment.
                    using var shifted = result - minVal;
                    var stretched = shifted * (255.0 / (maxVal - minVal));
                    result.Dispose();
                    result = stretched;
                }
            }
            _logger.Debug("Process: Normalize = {Normalize}", normalize);
            return result.Convert<Gray, byte>();
        }
        finally
        {
            // FIX: the float working image was previously leaked; always release it.
            result.Dispose();
        }
    }
}
@@ -0,0 +1,88 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: MultiplicationProcessor.cs
// Description: Multiplication operator, used for image enhancement.
// Features:
//   - Multiplies every pixel value by a constant
//   - Supports gain adjustment
//   - Optional normalized output
//   - Commonly used for image enhancement and contrast adjustment
// Algorithm: pixel-level multiplication
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Multiplication operator: multiplies every pixel by a constant gain, with
/// optional min-max normalization; without normalization the result is
/// clamped to the 0-255 range.
/// </summary>
public class MultiplicationProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MultiplicationProcessor>();

    /// <summary>
    /// Initializes the processor with its localized name and description.
    /// </summary>
    public MultiplicationProcessor()
    {
        Name = LocalizationHelper.GetString("MultiplicationProcessor_Name");
        Description = LocalizationHelper.GetString("MultiplicationProcessor_Description");
    }

    /// <summary>
    /// Registers the user-configurable parameters: multiplier and the
    /// normalization flag.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Multiplier", new ProcessorParameter(
            "Multiplier",
            LocalizationHelper.GetString("MultiplicationProcessor_Multiplier"),
            typeof(double),
            2.0,
            0.1,
            10.0,
            LocalizationHelper.GetString("MultiplicationProcessor_Multiplier_Desc")));
        Parameters.Add("Normalize", new ProcessorParameter(
            "Normalize",
            LocalizationHelper.GetString("MultiplicationProcessor_Normalize"),
            typeof(bool),
            true,
            null,
            null,
            LocalizationHelper.GetString("MultiplicationProcessor_Normalize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Multiplies each pixel by Multiplier in float precision. When Normalize
    /// is set the result is min-max stretched to 0-255; otherwise values above
    /// 255 are clamped to 255.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image.</param>
    /// <returns>8-bit grayscale result image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double multiplier = GetParameter<double>("Multiplier");
        bool normalize = GetParameter<bool>("Normalize");
        using var floatImage = inputImage.Convert<Gray, float>();
        var result = floatImage * multiplier;
        try
        {
            if (normalize)
            {
                double minVal = 0, maxVal = 0;
                Point minLoc = new Point();
                Point maxLoc = new Point();
                CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
                if (maxVal > minVal)
                {
                    // FIX: dispose the intermediates the Emgu operators allocate;
                    // the old "result" was previously leaked on reassignment.
                    using var shifted = result - minVal;
                    var stretched = shifted * (255.0 / (maxVal - minVal));
                    result.Dispose();
                    result = stretched;
                }
            }
            else
            {
                // BUG FIX: the previous code called
                // ThresholdBinary(new Gray(255), new Gray(255)), which maps every
                // pixel <= 255 to 0 and blacked out the whole image. The stated
                // intent was to clamp to 0-255, which ThresholdTrunc does:
                // values above the threshold are truncated to it.
                var clamped = result.ThresholdTrunc(new Gray(255));
                result.Dispose();
                result = clamped;
            }
            _logger.Debug("Process: Multiplier = {Multiplier}, Normalize = {Normalize}", multiplier, normalize);
            return result.Convert<Gray, byte>();
        }
        finally
        {
            result.Dispose();
        }
    }
}
@@ -0,0 +1,65 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: OrProcessor.cs
// Description: OR operator for image logical operations.
// Features:
//   - Performs a bitwise OR on the image
//   - Supports OR with a fixed constant value
//   - Usable for image merging and mask operations
// Algorithm: pixel-level bitwise OR
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// OR operator: combines every pixel with a fixed constant using bitwise OR.
/// </summary>
public class OrProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<OrProcessor>();

    /// <summary>
    /// Initializes the processor with its localized name and description.
    /// </summary>
    public OrProcessor()
    {
        Name = LocalizationHelper.GetString("OrProcessor_Name");
        Description = LocalizationHelper.GetString("OrProcessor_Description");
    }

    /// <summary>
    /// Registers the single user-configurable parameter: the constant OR value (0-255).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Value", new ProcessorParameter(
            "Value",
            LocalizationHelper.GetString("OrProcessor_Value"),
            typeof(int),
            0,
            0,
            255,
            LocalizationHelper.GetString("OrProcessor_Value_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Returns a copy of the input in which every pixel has been OR-ed with
    /// the configured constant value.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale source image.</param>
    /// <returns>8-bit grayscale result image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int value = GetParameter<int>("Value");
        var output = inputImage.Clone();
        int rows = output.Height;
        int cols = output.Width;
        byte[,,] data = output.Data;
        // The clone already holds the source pixels, so OR each one in place.
        for (int row = 0; row < rows; row++)
        {
            for (int col = 0; col < cols; col++)
            {
                data[row, col, 0] = (byte)(data[row, col, 0] | value);
            }
        }
        _logger.Debug("Process: Value = {Value}", value);
        return output;
    }
}
@@ -0,0 +1,87 @@
// ============================================================================
// File: AngleMeasurementProcessor.cs
// Description: Angle measurement operator — angle between two rays sharing an endpoint.
// Features:
//   - User defines three points: vertex, ray-A endpoint, ray-B endpoint
//   - Computes the angle between the two rays (0° to 180°)
//   - Publishes the geometry for drawing the rays, an angle arc, and a label
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
public class AngleMeasurementProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<AngleMeasurementProcessor>();

    /// <summary>
    /// Initializes the processor with its localized name and description.
    /// </summary>
    public AngleMeasurementProcessor()
    {
        Name = LocalizationHelper.GetString("AngleMeasurementProcessor_Name");
        Description = LocalizationHelper.GetString("AngleMeasurementProcessor_Description");
    }

    protected override void InitializeParameters()
    {
        // Three point coordinates (vertex V plus ray endpoints A and B) injected
        // by the interactive overlay control; doubles avoid integer rounding error.
        void AddHiddenPoint(string name, double initial) =>
            Parameters.Add(name, new ProcessorParameter(name, name, typeof(double), initial, null, null, "") { IsVisible = false });

        AddHiddenPoint("VX", 250.0);
        AddHiddenPoint("VY", 250.0);
        AddHiddenPoint("AX", 100.0);
        AddHiddenPoint("AY", 250.0);
        AddHiddenPoint("BX", 250.0);
        AddHiddenPoint("BY", 100.0);
    }

    /// <summary>
    /// Measures the angle (0°-180°) between rays V→A and V→B and publishes,
    /// via OutputData, everything needed to draw the two rays and an angle arc.
    /// The input image itself is returned unmodified (as a clone).
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double vx = GetParameter<double>("VX"), vy = GetParameter<double>("VY");
        double ax = GetParameter<double>("AX"), ay = GetParameter<double>("AY");
        double bx = GetParameter<double>("BX"), by = GetParameter<double>("BY");
        OutputData.Clear();

        // Rays from the vertex toward each endpoint.
        double rayAx = ax - vx, rayAy = ay - vy;
        double rayBx = bx - vx, rayBy = by - vy;
        double lenA = Math.Sqrt(rayAx * rayAx + rayAy * rayAy);
        double lenB = Math.Sqrt(rayBx * rayBx + rayBy * rayBy);

        // Angle via the dot-product formula; degenerate (near-zero) rays yield 0.
        double angleDeg = 0;
        if (lenA > 0.001 && lenB > 0.001)
        {
            double dotProduct = rayAx * rayBx + rayAy * rayBy;
            double cosine = Math.Clamp(dotProduct / (lenA * lenB), -1.0, 1.0);
            angleDeg = Math.Acos(cosine) * 180.0 / Math.PI;
        }

        // Arc-drawing info: start at ray A's heading and sweep toward ray B,
        // normalized into (-180, 180] so the arc follows the smaller angle.
        double angleA = Math.Atan2(rayAy, rayAx) * 180.0 / Math.PI;
        double angleB = Math.Atan2(rayBy, rayBx) * 180.0 / Math.PI;
        double sweep = angleB - angleA;
        if (sweep > 180) sweep -= 360;
        if (sweep < -180) sweep += 360;

        string angleText = $"{angleDeg:F2} deg";
        OutputData["AngleMeasurementResult"] = true;
        OutputData["Vertex"] = new Point((int)Math.Round(vx), (int)Math.Round(vy));
        OutputData["PointA"] = new Point((int)Math.Round(ax), (int)Math.Round(ay));
        OutputData["PointB"] = new Point((int)Math.Round(bx), (int)Math.Round(by));
        OutputData["AngleDeg"] = angleDeg;
        OutputData["ArcStartAngle"] = angleA;
        OutputData["ArcSweepAngle"] = sweep;
        OutputData["AngleText"] = angleText;

        _logger.Information("AngleMeasurement: Angle={Angle}, V=({VX},{VY}), A=({AX},{AY}), B=({BX},{BY})",
            angleText, vx, vy, ax, ay, bx, by);
        return inputImage.Clone();
    }
}
@@ -0,0 +1,403 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: BgaVoidRateProcessor.cs
// Description: BGA void-rate inspection operator (two-step automatic detection).
//
// Pipeline:
//   Step 1 - ball location: Gaussian blur → inverted Otsu threshold →
//            morphological close → contour detection → circularity filter →
//            ellipse fit
//   Step 2 - void detection: ball-contour mask → dual-threshold segmentation →
//            contour detection → area filter → void-rate computation
//
// Supports a polygonal ROI to restrict the detection area and IPC-7095-style
// PASS/FAIL classification. Positive-image convention: ball = dark, void = bright.
//
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// BGA void-rate inspection operator: step 1 locates the solder balls, step 2
/// segments voids inside each ball, then per-ball and overall void rates are
/// computed and classified PASS/FAIL against a configurable limit.
/// </summary>
public class BgaVoidRateProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<BgaVoidRateProcessor>();

    /// <summary>
    /// Initializes the processor with its localized name and description.
    /// </summary>
    public BgaVoidRateProcessor()
    {
        Name = LocalizationHelper.GetString("BgaVoidRateProcessor_Name");
        Description = LocalizationHelper.GetString("BgaVoidRateProcessor_Description");
    }

    /// <summary>
    /// Registers all tunable parameters: ROI restriction, step-1 ball-location
    /// settings, and step-2 void-detection settings.
    /// </summary>
    protected override void InitializeParameters()
    {
        // ── ROI restriction ──
        Parameters.Add("RoiMode", new ProcessorParameter(
            "RoiMode",
            LocalizationHelper.GetString("BgaVoidRateProcessor_RoiMode"),
            typeof(string), "None", null, null,
            LocalizationHelper.GetString("BgaVoidRateProcessor_RoiMode_Desc"),
            new string[] { "None", "Polygon" }));
        // Polygon ROI vertex count and coordinates (injected by the UI,
        // hidden from the parameter panel; at most 32 points supported).
        Parameters.Add("PolyCount", new ProcessorParameter("PolyCount", "PolyCount", typeof(int), 0, null, null, "") { IsVisible = false });
        for (int i = 0; i < 32; i++)
        {
            Parameters.Add($"PolyX{i}", new ProcessorParameter($"PolyX{i}", $"PolyX{i}", typeof(int), 0, null, null, "") { IsVisible = false });
            Parameters.Add($"PolyY{i}", new ProcessorParameter($"PolyY{i}", $"PolyY{i}", typeof(int), 0, null, null, "") { IsVisible = false });
        }
        // ── Step 1: BGA ball-location parameters ──
        Parameters.Add("BgaMinArea", new ProcessorParameter(
            "BgaMinArea",
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaMinArea"),
            typeof(int), 500, 10, 1000000,
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaMinArea_Desc")));
        Parameters.Add("BgaMaxArea", new ProcessorParameter(
            "BgaMaxArea",
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaMaxArea"),
            typeof(int), 500000, 100, 10000000,
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaMaxArea_Desc")));
        Parameters.Add("BgaBlurSize", new ProcessorParameter(
            "BgaBlurSize",
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaBlurSize"),
            typeof(int), 5, 1, 31,
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaBlurSize_Desc")));
        Parameters.Add("BgaCircularity", new ProcessorParameter(
            "BgaCircularity",
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaCircularity"),
            typeof(double), 0.5, 0.0, 1.0,
            LocalizationHelper.GetString("BgaVoidRateProcessor_BgaCircularity_Desc")));
        // ── Step 2: void-detection parameters ──
        Parameters.Add("MinThreshold", new ProcessorParameter(
            "MinThreshold",
            LocalizationHelper.GetString("BgaVoidRateProcessor_MinThreshold"),
            typeof(int), 128, 0, 255,
            LocalizationHelper.GetString("BgaVoidRateProcessor_MinThreshold_Desc")));
        Parameters.Add("MaxThreshold", new ProcessorParameter(
            "MaxThreshold",
            LocalizationHelper.GetString("BgaVoidRateProcessor_MaxThreshold"),
            typeof(int), 255, 0, 255,
            LocalizationHelper.GetString("BgaVoidRateProcessor_MaxThreshold_Desc")));
        Parameters.Add("MinVoidArea", new ProcessorParameter(
            "MinVoidArea",
            LocalizationHelper.GetString("BgaVoidRateProcessor_MinVoidArea"),
            typeof(int), 10, 1, 10000,
            LocalizationHelper.GetString("BgaVoidRateProcessor_MinVoidArea_Desc")));
        Parameters.Add("VoidLimit", new ProcessorParameter(
            "VoidLimit",
            LocalizationHelper.GetString("BgaVoidRateProcessor_VoidLimit"),
            typeof(double), 25.0, 0.0, 100.0,
            LocalizationHelper.GetString("BgaVoidRateProcessor_VoidLimit_Desc")));
        Parameters.Add("Thickness", new ProcessorParameter(
            "Thickness",
            LocalizationHelper.GetString("BgaVoidRateProcessor_Thickness"),
            typeof(int), 2, 1, 10,
            LocalizationHelper.GetString("BgaVoidRateProcessor_Thickness_Desc")));
    }

    /// <summary>
    /// Runs the two-step inspection and publishes the results through
    /// OutputData. The returned image is an unmodified clone of the input;
    /// drawing of overlays is left to the consumer of OutputData.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale X-ray image (positive convention).</param>
    /// <returns>A clone of the input image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string roiMode = GetParameter<string>("RoiMode");
        int bgaMinArea = GetParameter<int>("BgaMinArea");
        int bgaMaxArea = GetParameter<int>("BgaMaxArea");
        int bgaBlurSize = GetParameter<int>("BgaBlurSize");
        double bgaCircularity = GetParameter<double>("BgaCircularity");
        int minThresh = GetParameter<int>("MinThreshold");
        int maxThresh = GetParameter<int>("MaxThreshold");
        int minVoidArea = GetParameter<int>("MinVoidArea");
        double voidLimit = GetParameter<double>("VoidLimit");
        int thickness = GetParameter<int>("Thickness");
        // Ensure the blur kernel size is odd (required by GaussianBlur).
        if (bgaBlurSize % 2 == 0) bgaBlurSize++;
        OutputData.Clear();
        int w = inputImage.Width, h = inputImage.Height;
        // Build the ROI mask that restricts detection to the user-drawn polygon.
        Image<Gray, byte>? roiMask = null;
        if (roiMode == "Polygon")
        {
            int polyCount = GetParameter<int>("PolyCount");
            // A polygon needs at least 3 vertices; otherwise no ROI is applied.
            if (polyCount >= 3)
            {
                var pts = new Point[polyCount];
                for (int i = 0; i < polyCount; i++)
                    pts[i] = new Point(GetParameter<int>($"PolyX{i}"), GetParameter<int>($"PolyY{i}"));
                roiMask = new Image<Gray, byte>(w, h);
                using var vop = new VectorOfPoint(pts);
                using var vvop = new VectorOfVectorOfPoint(vop);
                CvInvoke.DrawContours(roiMask, vvop, 0, new MCvScalar(255), -1);
                _logger.Debug("ROI Polygon: {Count} points", polyCount);
            }
        }
        OutputData["RoiMode"] = roiMode;
        // NOTE(review): roiMask is published here but disposed at the end of this
        // method; a consumer reading OutputData["RoiMask"] after Process returns
        // would see a disposed image — confirm downstream usage.
        OutputData["RoiMask"] = roiMask;
        _logger.Debug("BgaVoidRate 两步� BgaArea=[{Min},{Max}], Blur={Blur}, Circ={Circ}, Thresh=[{TMin},{TMax}]",
            bgaMinArea, bgaMaxArea, bgaBlurSize, bgaCircularity, minThresh, maxThresh);
        // ================================================================
        // Step 1: automatically locate the BGA solder balls
        // ================================================================
        var bgaResults = DetectBgaBalls(inputImage, bgaBlurSize, bgaMinArea, bgaMaxArea, bgaCircularity, roiMask);
        _logger.Information("第一步完æˆ? 检测到 {Count} 个BGA焊çƒ", bgaResults.Count);
        if (bgaResults.Count == 0)
        {
            // No balls found: publish an empty-but-valid result set and bail out.
            OutputData["BgaVoidResult"] = true;
            OutputData["BgaCount"] = 0;
            OutputData["BgaBalls"] = bgaResults;
            OutputData["VoidRate"] = 0.0;
            OutputData["Classification"] = "N/A";
            OutputData["ResultText"] = "No BGA detected";
            OutputData["Thickness"] = thickness;
            OutputData["VoidLimit"] = voidLimit;
            OutputData["TotalBgaArea"] = 0;
            OutputData["TotalVoidArea"] = 0;
            OutputData["TotalVoidCount"] = 0;
            roiMask?.Dispose();
            return inputImage.Clone();
        }
        // ================================================================
        // Step 2: detect voids inside every located ball
        // ================================================================
        int totalBgaArea = 0;
        int totalVoidArea = 0;
        int totalVoidCount = 0;
        foreach (var bga in bgaResults)
        {
            DetectVoidsInBga(inputImage, bga, minThresh, maxThresh, minVoidArea);
            totalBgaArea += bga.BgaArea;
            totalVoidArea += bga.VoidPixels;
            totalVoidCount += bga.Voids.Count;
        }
        // Overall void rate is area-weighted across all balls.
        double overallVoidRate = totalBgaArea > 0 ? (double)totalVoidArea / totalBgaArea * 100.0 : 0;
        string classification = overallVoidRate <= voidLimit ? "PASS" : "FAIL";
        // Also classify each ball individually against the same limit.
        foreach (var bga in bgaResults)
        {
            bga.Classification = bga.VoidRate <= voidLimit ? "PASS" : "FAIL";
        }
        _logger.Information("第二步完� 总气泡率={VoidRate:F1}%, 气泡�{Count}, 判定={Class}",
            overallVoidRate, totalVoidCount, classification);
        // Publish aggregated results for the rendering/reporting layer.
        OutputData["BgaVoidResult"] = true;
        OutputData["BgaCount"] = bgaResults.Count;
        OutputData["BgaBalls"] = bgaResults;
        OutputData["VoidRate"] = overallVoidRate;
        OutputData["FillRate"] = 100.0 - overallVoidRate;
        OutputData["TotalBgaArea"] = totalBgaArea;
        OutputData["TotalVoidArea"] = totalVoidArea;
        OutputData["TotalVoidCount"] = totalVoidCount;
        OutputData["VoidLimit"] = voidLimit;
        OutputData["Classification"] = classification;
        OutputData["Thickness"] = thickness;
        OutputData["ResultText"] = $"Void: {overallVoidRate:F1}% | {classification} | BGA×{bgaResults.Count}";
        roiMask?.Dispose();
        return inputImage.Clone();
    }

    /// <summary>
    /// Step 1: automatically locate BGA solder balls using Otsu binarization,
    /// contour detection, a circularity filter, and ellipse fitting.
    /// </summary>
    /// <param name="input">8-bit grayscale source image.</param>
    /// <param name="blurSize">Odd Gaussian kernel size used for denoising.</param>
    /// <param name="minArea">Minimum accepted contour area (pixels).</param>
    /// <param name="maxArea">Maximum accepted contour area (pixels).</param>
    /// <param name="minCircularity">Minimum 4πA/P² circularity to accept a contour.</param>
    /// <param name="roiMask">Optional mask restricting detection; null = whole image.</param>
    /// <returns>Ball records sorted by descending area, re-indexed from 1.</returns>
    private List<BgaBallInfo> DetectBgaBalls(Image<Gray, byte> input, int blurSize, int minArea, int maxArea, double minCircularity, Image<Gray, byte>? roiMask)
    {
        var results = new List<BgaBallInfo>();
        int w = input.Width, h = input.Height;
        // Gaussian blur to suppress noise before thresholding.
        var blurred = new Image<Gray, byte>(w, h);
        CvInvoke.GaussianBlur(input, blurred, new Size(blurSize, blurSize), 0);
        // Otsu automatic threshold, inverted: balls are dark in the positive
        // X-ray image, so BinaryInv makes them white in the mask.
        var binary = new Image<Gray, byte>(w, h);
        CvInvoke.Threshold(blurred, binary, 0, 255, ThresholdType.Otsu | ThresholdType.BinaryInv);
        // If an ROI mask is present, keep only the thresholded pixels inside it.
        if (roiMask != null)
        {
            CvInvoke.BitwiseAnd(binary, roiMask, binary);
        }
        // Morphological close to fill small holes inside the ball blobs.
        var kernel = CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(5, 5), new Point(-1, -1));
        CvInvoke.MorphologyEx(binary, binary, MorphOp.Close, kernel, new Point(-1, -1), 2, BorderType.Default, new MCvScalar(0));
        // Find the outer contours of the candidate balls.
        using var contours = new VectorOfVectorOfPoint();
        using var hierarchy = new Mat();
        CvInvoke.FindContours(binary, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
        int bgaIndex = 0;
        for (int i = 0; i < contours.Size; i++)
        {
            double area = CvInvoke.ContourArea(contours[i]);
            if (area < minArea || area > maxArea) continue;
            // Circularity filter: circularity = 4π × area / perimeter²
            double perimeter = CvInvoke.ArcLength(contours[i], true);
            if (perimeter < 1) continue;
            double circularity = 4.0 * Math.PI * area / (perimeter * perimeter);
            if (circularity < minCircularity) continue;
            // FitEllipse requires at least 5 contour points.
            if (contours[i].Size < 5) continue;
            var ellipse = CvInvoke.FitEllipse(contours[i]);
            var moments = CvInvoke.Moments(contours[i]);
            if (moments.M00 < 1) continue;
            bgaIndex++;
            results.Add(new BgaBallInfo
            {
                Index = bgaIndex,
                // Centroid from image moments.
                CenterX = moments.M10 / moments.M00,
                CenterY = moments.M01 / moments.M00,
                FittedEllipse = ellipse,
                ContourPoints = contours[i].ToArray(),
                BgaArea = (int)area,
                Circularity = circularity
            });
        }
        // Sort by area, largest first, and re-number from 1.
        results.Sort((a, b) => b.BgaArea.CompareTo(a.BgaArea));
        for (int i = 0; i < results.Count; i++) results[i].Index = i + 1;
        blurred.Dispose();
        binary.Dispose();
        kernel.Dispose();
        return results;
    }

    /// <summary>
    /// Step 2: detect voids inside a single BGA ball. The ball contour serves
    /// as a mask; pixels whose gray value lies within [minThresh, maxThresh]
    /// (voids appear bright in the positive image) are segmented as void area,
    /// then individual void contours are extracted and filtered by area.
    /// Updates <paramref name="bga"/> in place (BgaArea, VoidPixels, VoidRate, Voids).
    /// </summary>
    private void DetectVoidsInBga(Image<Gray, byte> input, BgaBallInfo bga, int minThresh, int maxThresh, int minVoidArea)
    {
        int w = input.Width, h = input.Height;
        // Rasterize this ball's contour into a filled mask.
        var mask = new Image<Gray, byte>(w, h);
        using (var vop = new VectorOfPoint(bga.ContourPoints))
        using (var vvop = new VectorOfVectorOfPoint(vop))
        {
            CvInvoke.DrawContours(mask, vvop, 0, new MCvScalar(255), -1);
        }
        // Refine the ball area to the exact filled-mask pixel count.
        int bgaPixels = CvInvoke.CountNonZero(mask);
        bga.BgaArea = bgaPixels;
        // Dual-threshold segmentation inside the mask: gray values within
        // [minThresh, maxThresh] are classified as void.
        var voidImg = new Image<Gray, byte>(w, h);
        byte[,,] srcData = input.Data;
        byte[,,] dstData = voidImg.Data;
        byte[,,] maskData = mask.Data;
        for (int y = 0; y < h; y++)
        {
            for (int x = 0; x < w; x++)
            {
                if (maskData[y, x, 0] > 0)
                {
                    byte val = srcData[y, x, 0];
                    dstData[y, x, 0] = (val >= minThresh && val <= maxThresh) ? (byte)255 : (byte)0;
                }
            }
        }
        int voidPixels = CvInvoke.CountNonZero(voidImg);
        bga.VoidPixels = voidPixels;
        bga.VoidRate = bgaPixels > 0 ? (double)voidPixels / bgaPixels * 100.0 : 0;
        // Extract the contour of each individual void.
        using var contours = new VectorOfVectorOfPoint();
        using var hierarchy = new Mat();
        CvInvoke.FindContours(voidImg, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
        for (int i = 0; i < contours.Size; i++)
        {
            double area = CvInvoke.ContourArea(contours[i]);
            if (area < minVoidArea) continue;
            var moments = CvInvoke.Moments(contours[i]);
            if (moments.M00 < 1) continue;
            bga.Voids.Add(new VoidInfo
            {
                Index = bga.Voids.Count + 1,
                CenterX = moments.M10 / moments.M00,
                CenterY = moments.M01 / moments.M00,
                Area = area,
                AreaPercent = bgaPixels > 0 ? area / bgaPixels * 100.0 : 0,
                BoundingBox = CvInvoke.BoundingRectangle(contours[i]),
                ContourPoints = contours[i].ToArray()
            });
        }
        // Sort voids by area, largest first, and re-number from 1.
        bga.Voids.Sort((a, b) => b.Area.CompareTo(a.Area));
        for (int i = 0; i < bga.Voids.Count; i++) bga.Voids[i].Index = i + 1;
        mask.Dispose();
        voidImg.Dispose();
    }
}
/// <summary>
/// Detection result for a single BGA solder ball.
/// </summary>
public class BgaBallInfo
{
    /// <summary>1-based index, assigned after sorting balls by descending area.</summary>
    public int Index { get; set; }
    /// <summary>Ball centroid X (pixels), from contour moments.</summary>
    public double CenterX { get; set; }
    /// <summary>Ball centroid Y (pixels), from contour moments.</summary>
    public double CenterY { get; set; }
    /// <summary>Ellipse fitted to the ball contour (for overlay drawing).</summary>
    public RotatedRect FittedEllipse { get; set; }
    /// <summary>Outer contour of the ball; never null.</summary>
    public Point[] ContourPoints { get; set; } = Array.Empty<Point>();
    /// <summary>Ball area in pixels (refined to the filled-mask pixel count in step 2).</summary>
    public int BgaArea { get; set; }
    /// <summary>Contour circularity, 4πA/P² (1.0 = perfect circle).</summary>
    public double Circularity { get; set; }
    /// <summary>Total number of void pixels found inside this ball.</summary>
    public int VoidPixels { get; set; }
    /// <summary>Void rate in percent: VoidPixels / BgaArea × 100.</summary>
    public double VoidRate { get; set; }
    /// <summary>"PASS"/"FAIL" against the configured void limit; "N/A" until classified.</summary>
    public string Classification { get; set; } = "N/A";
    /// <summary>Individual voids detected inside this ball, sorted by descending area.</summary>
    public List<VoidInfo> Voids { get; set; } = new();
}
/// <summary>
/// Detection result for a single void (bubble) inside a BGA ball.
/// </summary>
public class VoidInfo
{
    /// <summary>1-based index, assigned after sorting voids by descending area.</summary>
    public int Index { get; set; }
    /// <summary>Void centroid X (pixels), from contour moments.</summary>
    public double CenterX { get; set; }
    /// <summary>Void centroid Y (pixels), from contour moments.</summary>
    public double CenterY { get; set; }
    /// <summary>Void contour area in pixels.</summary>
    public double Area { get; set; }
    /// <summary>Void area as a percentage of the enclosing ball's area.</summary>
    public double AreaPercent { get; set; }
    /// <summary>Axis-aligned bounding rectangle of the void contour.</summary>
    public Rectangle BoundingBox { get; set; }
    /// <summary>Void contour points; never null.</summary>
    public Point[] ContourPoints { get; set; } = Array.Empty<Point>();
}
@@ -0,0 +1,254 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: ContourProcessor.cs
// Description: Contour detection operator, used to find contours in the image.
// Features:
//   - Optional threshold pre-processing (fixed value or Otsu)
//   - Detects contours of white or black regions
//   - Computes contour area, perimeter, centroid and bounding geometry
//   - Filters contours by an area range
// Algorithm: OpenCV contour detection
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Contour detection operator
/// </summary>
public class ContourProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<ContourProcessor>();
/// <summary>
/// Initializes the processor with its localized name and description.
/// </summary>
public ContourProcessor()
{
    Name = LocalizationHelper.GetString("ContourProcessor_Name");
    Description = LocalizationHelper.GetString("ContourProcessor_Description");
}
/// <summary>
/// Registers the user-configurable parameters: target region color, optional
/// threshold pre-processing (fixed value or Otsu), contour-area filter range,
/// and the overlay line thickness.
/// </summary>
protected override void InitializeParameters()
{
    // Which region polarity to outline: white (foreground) or black (inverted).
    Parameters.Add("TargetColor", new ProcessorParameter(
        "TargetColor",
        LocalizationHelper.GetString("ContourProcessor_TargetColor"),
        typeof(string),
        "White",
        null,
        null,
        LocalizationHelper.GetString("ContourProcessor_TargetColor_Desc"),
        new string[] { "White", "Black" }));
    // Whether to binarize the image before contour detection.
    Parameters.Add("UseThreshold", new ProcessorParameter(
        "UseThreshold",
        LocalizationHelper.GetString("ContourProcessor_UseThreshold"),
        typeof(bool),
        false,
        null,
        null,
        LocalizationHelper.GetString("ContourProcessor_UseThreshold_Desc")));
    // Fixed binarization threshold, used when UseOtsu is off.
    Parameters.Add("ThresholdValue", new ProcessorParameter(
        "ThresholdValue",
        LocalizationHelper.GetString("ContourProcessor_ThresholdValue"),
        typeof(int),
        120,
        0,
        255,
        LocalizationHelper.GetString("ContourProcessor_ThresholdValue_Desc")));
    // Use Otsu's automatic threshold instead of the fixed ThresholdValue.
    Parameters.Add("UseOtsu", new ProcessorParameter(
        "UseOtsu",
        LocalizationHelper.GetString("ContourProcessor_UseOtsu"),
        typeof(bool),
        false,
        null,
        null,
        LocalizationHelper.GetString("ContourProcessor_UseOtsu_Desc")));
    // Contours with area below this value (pixels) are discarded.
    Parameters.Add("MinArea", new ProcessorParameter(
        "MinArea",
        LocalizationHelper.GetString("ContourProcessor_MinArea"),
        typeof(double),
        10.0,
        0.0,
        10000.0,
        LocalizationHelper.GetString("ContourProcessor_MinArea_Desc")));
    // Contours with area above this value (pixels) are discarded.
    Parameters.Add("MaxArea", new ProcessorParameter(
        "MaxArea",
        LocalizationHelper.GetString("ContourProcessor_MaxArea"),
        typeof(double),
        100000.0,
        0.0,
        1000000.0,
        LocalizationHelper.GetString("ContourProcessor_MaxArea_Desc")));
    // Line thickness for drawing contour overlays.
    Parameters.Add("Thickness", new ProcessorParameter(
        "Thickness",
        LocalizationHelper.GetString("ContourProcessor_Thickness"),
        typeof(int),
        2,
        1,
        10,
        LocalizationHelper.GetString("ContourProcessor_Thickness_Desc")));
    _logger.Debug("InitializeParameters");
}
/// <summary>
/// Detects external contours in the input image — optionally after threshold
/// segmentation and/or inversion (for black targets) — filters them by area,
/// and publishes the surviving contours through <see cref="OutputData"/>
/// ("ContourCount", "Contours", "Thickness"). The returned image is an
/// unmodified clone of the input; drawing is left to downstream renderers.
/// </summary>
/// <param name="inputImage">Grayscale source image; never mutated.</param>
/// <returns>A clone of <paramref name="inputImage"/>.</returns>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
    string targetColor = GetParameter<string>("TargetColor");
    bool useThreshold = GetParameter<bool>("UseThreshold");
    int thresholdValue = GetParameter<int>("ThresholdValue");
    bool useOtsu = GetParameter<bool>("UseOtsu");
    double minArea = GetParameter<double>("MinArea");
    double maxArea = GetParameter<double>("MaxArea");
    int thickness = GetParameter<int>("Thickness");
    _logger.Debug("Process started: TargetColor = '{TargetColor}', UseThreshold = {UseThreshold}, ThresholdValue = {ThresholdValue}, UseOtsu = {UseOtsu}",
        targetColor, useThreshold, thresholdValue, useOtsu);
    OutputData.Clear();
    // Work on a private copy so the caller's image is never mutated.
    Image<Gray, byte> processImage = inputImage.Clone();
    try
    {
        // Step 1: optional binarization (Otsu or fixed threshold).
        if (useThreshold)
        {
            _logger.Debug("Applying threshold processing");
            Image<Gray, byte> thresholdImage = new Image<Gray, byte>(processImage.Size);
            if (useOtsu)
            {
                // Otsu picks the threshold automatically; the supplied value is ignored.
                CvInvoke.Threshold(processImage, thresholdImage, 0, 255, ThresholdType.Otsu);
                _logger.Debug("Applied Otsu threshold");
            }
            else
            {
                CvInvoke.Threshold(processImage, thresholdImage, thresholdValue, 255, ThresholdType.Binary);
                _logger.Debug("Applied binary threshold with value {ThresholdValue}", thresholdValue);
            }
            // Best-effort debug snapshot; a save failure must not abort processing.
            try
            {
                string debugPath = Path.Combine("logs", $"contour_threshold_{DateTime.Now:yyyyMMdd_HHmmss}.png");
                Directory.CreateDirectory("logs");
                thresholdImage.Save(debugPath);
                _logger.Information("Saved threshold image to: {DebugPath}", debugPath);
            }
            catch (Exception ex)
            {
                _logger.Warning(ex, "Failed to save threshold image for debugging");
            }
            processImage.Dispose();
            processImage = thresholdImage;
        }
        // Step 2: FindContours traces white regions, so invert when the target is black.
        bool isBlackTarget = targetColor != null &&
            (targetColor.Equals("Black", StringComparison.OrdinalIgnoreCase) ||
             targetColor.Equals("暺𤏸𠧧", StringComparison.OrdinalIgnoreCase));
        if (isBlackTarget)
        {
            _logger.Debug("Inverting image for black region detection");
            CvInvoke.BitwiseNot(processImage, processImage);
            // Best-effort debug snapshot of the inverted image.
            try
            {
                string debugPath = Path.Combine("logs", $"contour_inverted_{DateTime.Now:yyyyMMdd_HHmmss}.png");
                Directory.CreateDirectory("logs");
                processImage.Save(debugPath);
                _logger.Information("Saved inverted image to: {DebugPath}", debugPath);
            }
            catch (Exception ex)
            {
                _logger.Warning(ex, "Failed to save inverted image for debugging");
            }
        }
        // Step 3: contour extraction and area filtering.
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        using (Mat hierarchy = new Mat())   // was leaked on exception: now deterministic
        {
            CvInvoke.FindContours(processImage, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            _logger.Debug("Found {TotalContours} total contours before filtering", contours.Size);
            List<ContourInfo> contourInfos = new();
            for (int i = 0; i < contours.Size; i++)
            {
                double area = CvInvoke.ContourArea(contours[i]);
                if (area >= minArea && area <= maxArea)
                {
                    var moments = CvInvoke.Moments(contours[i]);
                    // Guard against degenerate contours (M00 == 0), which would
                    // otherwise produce NaN centroids when MinArea is 0.
                    if (moments.M00 <= 0)
                    {
                        _logger.Debug("Contour {Index} skipped: zero moment (degenerate contour)", i);
                        continue;
                    }
                    var boundingRect = CvInvoke.BoundingRectangle(contours[i]);
                    double perimeter = CvInvoke.ArcLength(contours[i], true);
                    var circle = CvInvoke.MinEnclosingCircle(contours[i]);
                    contourInfos.Add(new ContourInfo
                    {
                        Index = i,
                        Area = area,
                        Perimeter = perimeter,
                        CenterX = moments.M10 / moments.M00,
                        CenterY = moments.M01 / moments.M00,
                        BoundingBox = boundingRect,
                        Points = contours[i].ToArray(),
                        CircleCenter = circle.Center,
                        CircleRadius = circle.Radius
                    });
                    _logger.Debug("Contour {Index}: Area = {Area}, Center = ({CenterX:F2}, {CenterY:F2})",
                        i, area, moments.M10 / moments.M00, moments.M01 / moments.M00);
                }
                else
                {
                    _logger.Debug("Contour {Index} filtered out: Area = {Area} (not in range {MinArea} - {MaxArea})",
                        i, area, minArea, maxArea);
                }
            }
            OutputData["ContourCount"] = contourInfos.Count;
            OutputData["Contours"] = contourInfos;
            OutputData["Thickness"] = thickness;
            _logger.Information("Process completed: TargetColor = '{TargetColor}', Found {ContourCount} contours (filtered from {TotalContours})",
                targetColor, contourInfos.Count, contours.Size);
            return inputImage.Clone();
        }
    }
    finally
    {
        // Disposed on all paths; the original leaked it when any CV call threw.
        processImage.Dispose();
    }
}
}
/// <summary>
/// Data record for one detected contour, as published by the contour
/// processor in OutputData["Contours"].
/// </summary>
public class ContourInfo
{
/// <summary>Index of the contour in the detection order.</summary>
public int Index { get; set; }
/// <summary>Contour area in pixels (CvInvoke.ContourArea).</summary>
public double Area { get; set; }
/// <summary>Closed-contour perimeter in pixels (CvInvoke.ArcLength).</summary>
public double Perimeter { get; set; }
/// <summary>Centroid X from image moments (M10 / M00).</summary>
public double CenterX { get; set; }
/// <summary>Centroid Y from image moments (M01 / M00).</summary>
public double CenterY { get; set; }
/// <summary>Axis-aligned bounding rectangle of the contour.</summary>
public Rectangle BoundingBox { get; set; }
/// <summary>Raw contour points; never null (defaults to empty).</summary>
public Point[] Points { get; set; } = Array.Empty<Point>();
/// <summary>Center of the minimum enclosing circle.</summary>
public PointF CircleCenter { get; set; }
/// <summary>Radius of the minimum enclosing circle.</summary>
public float CircleRadius { get; set; }
}
@@ -0,0 +1,314 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: EllipseDetectionProcessor.cs
// Description: Ellipse detection processor; detects ellipses in an image via
//              contour analysis and ellipse fitting.
// Features:
//   - Threshold segmentation + contour extraction
//   - Ellipse fitting (FitEllipse)
//   - Multi-criteria filtering: area / axis length / eccentricity / fit error
//   - Supports dual-threshold segmentation and Otsu auto-thresholding
// Algorithm: threshold segmentation + OpenCV FitEllipse
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Result record for one detected ellipse.
/// </summary>
public class EllipseInfo
{
/// <summary>Sequential index of the ellipse in the detection order.</summary>
public int Index { get; set; }
/// <summary>Center point X (pixels).</summary>
public float CenterX { get; set; }
/// <summary>Center point Y (pixels).</summary>
public float CenterY { get; set; }
/// <summary>Major axis length (pixels).</summary>
public float MajorAxis { get; set; }
/// <summary>Minor axis length (pixels).</summary>
public float MinorAxis { get; set; }
/// <summary>Rotation angle in degrees.</summary>
public float Angle { get; set; }
/// <summary>Contour area in pixels.</summary>
public double Area { get; set; }
/// <summary>Contour perimeter in pixels.</summary>
public double Perimeter { get; set; }
/// <summary>Eccentricity (0 = circle, approaching 1 = flattened ellipse).</summary>
public double Eccentricity { get; set; }
/// <summary>Fit error (pixels).</summary>
public double FitError { get; set; }
/// <summary>Source contour point set; never null (defaults to empty).</summary>
public Point[] ContourPoints { get; set; } = Array.Empty<Point>();
/// <summary>Axis-aligned bounding rectangle.</summary>
public Rectangle BoundingBox { get; set; }
}
/// <summary>
/// Ellipse detector: binarizes the image (dual-threshold or Otsu), extracts
/// contours, fits an ellipse to each contour (FitEllipse) and filters the
/// candidates by area, major-axis length, eccentricity and average fit error.
/// </summary>
public class EllipseDetector
{
    private static readonly ILogger _logger = Log.ForContext<EllipseDetector>();

    /// <summary>Lower bound of the dual-threshold segmentation (also passed to Threshold in Otsu mode, where it is ignored).</summary>
    public int MinThreshold { get; set; } = 64;
    /// <summary>Upper bound of the dual-threshold segmentation.</summary>
    public int MaxThreshold { get; set; } = 192;
    /// <summary>When true, Otsu automatic thresholding replaces the dual threshold.</summary>
    public bool UseOtsu { get; set; } = false;
    /// <summary>Minimum contour points before attempting a fit (FitEllipse itself needs at least 5).</summary>
    public int MinContourPoints { get; set; } = 30;
    /// <summary>Minimum accepted contour area (pixels).</summary>
    public double MinArea { get; set; } = 100;
    /// <summary>Maximum accepted contour area (pixels).</summary>
    public double MaxArea { get; set; } = 1000000;
    /// <summary>Minimum accepted major-axis length (pixels).</summary>
    public float MinMajorAxis { get; set; } = 10;
    /// <summary>Maximum accepted eccentricity (0 = circle).</summary>
    public double MaxEccentricity { get; set; } = 0.95;
    /// <summary>Maximum accepted average fit error (pixels).</summary>
    public double MaxFitError { get; set; } = 5.0;
    /// <summary>Line thickness hint forwarded to renderers; not used by detection itself.</summary>
    public int Thickness { get; set; } = 2;

    /// <summary>
    /// Runs ellipse detection on a grayscale image, optionally restricted to a
    /// ROI mask (non-zero mask pixels are kept).
    /// </summary>
    /// <param name="inputImage">Grayscale source image; not modified.</param>
    /// <param name="roiMask">Optional mask; when supplied, detection is limited to its non-zero area.</param>
    /// <returns>All ellipses that survived every filter, indexed from 0.</returns>
    public List<EllipseInfo> Detect(Image<Gray, byte> inputImage, Image<Gray, byte>? roiMask = null)
    {
        _logger.Debug("Ellipse detection started: UseOtsu={UseOtsu}, MinThreshold={Min}, MaxThreshold={Max}",
            UseOtsu, MinThreshold, MaxThreshold);
        var results = new List<EllipseInfo>();
        using var binary = new Image<Gray, byte>(inputImage.Size);
        if (UseOtsu)
        {
            CvInvoke.Threshold(inputImage, binary, MinThreshold, 255, ThresholdType.Otsu);
            _logger.Debug("Using Otsu auto threshold");
        }
        else
        {
            // Dual-threshold segmentation: pixels inside [MinThreshold, MaxThreshold]
            // become foreground (255), everything else background (0).
            byte[,,] inputData = inputImage.Data;
            byte[,,] outputData = binary.Data;
            int height = inputImage.Height;
            int width = inputImage.Width;
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixelValue = inputData[y, x, 0];
                    outputData[y, x, 0] = (pixelValue >= MinThreshold && pixelValue <= MaxThreshold)
                        ? (byte)255
                        : (byte)0;
                }
            }
            _logger.Debug("Dual threshold segmentation: MinThreshold={Min}, MaxThreshold={Max}", MinThreshold, MaxThreshold);
        }
        // Apply the ROI mask, if any.
        if (roiMask != null)
        {
            CvInvoke.BitwiseAnd(binary, roiMask, binary);
        }
        using var contours = new VectorOfVectorOfPoint();
        using var hierarchy = new Mat();
        CvInvoke.FindContours(binary, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);
        _logger.Debug("Found {Count} contours", contours.Size);
        int index = 0;
        for (int i = 0; i < contours.Size; i++)
        {
            var contour = contours[i];
            // FitEllipse requires >= 5 points; also honor the user-configured minimum.
            if (contour.Size < Math.Max(5, MinContourPoints)) continue;
            double area = CvInvoke.ContourArea(contour);
            if (area < MinArea || area > MaxArea) continue;
            RotatedRect ellipseRect = CvInvoke.FitEllipse(contour);
            float majorAxis = Math.Max(ellipseRect.Size.Width, ellipseRect.Size.Height);
            float minorAxis = Math.Min(ellipseRect.Size.Width, ellipseRect.Size.Height);
            if (majorAxis < MinMajorAxis) continue;
            // Eccentricity: 0 = circle, approaching 1 = elongated ellipse.
            double eccentricity = 0;
            if (majorAxis > 0)
            {
                double ratio = minorAxis / majorAxis;
                eccentricity = Math.Sqrt(1.0 - ratio * ratio);
            }
            if (eccentricity > MaxEccentricity) continue;
            double fitError = ComputeFitError(contour.ToArray(), ellipseRect);
            if (fitError > MaxFitError) continue;
            results.Add(new EllipseInfo
            {
                Index = index++,
                CenterX = ellipseRect.Center.X,
                CenterY = ellipseRect.Center.Y,
                MajorAxis = majorAxis,
                MinorAxis = minorAxis,
                Angle = ellipseRect.Angle,
                Area = area,
                Perimeter = CvInvoke.ArcLength(contour, true),
                Eccentricity = eccentricity,
                FitError = fitError,
                ContourPoints = contour.ToArray(),
                BoundingBox = CvInvoke.BoundingRectangle(contour)
            });
        }
        _logger.Information("Ellipse detection completed: detected {Count} ellipses", results.Count);
        return results;
    }

    /// <summary>
    /// Average radial deviation of the contour points from the fitted ellipse,
    /// scaled by sqrt(a*b) to yield an approximate pixel distance. Returns
    /// <see cref="double.MaxValue"/> for degenerate ellipses so the caller's
    /// MaxFitError filter rejects them.
    /// </summary>
    private static double ComputeFitError(Point[] contourPoints, RotatedRect ellipse)
    {
        double cx = ellipse.Center.X, cy = ellipse.Center.Y;
        double a = Math.Max(ellipse.Size.Width, ellipse.Size.Height) / 2.0;
        double b = Math.Min(ellipse.Size.Width, ellipse.Size.Height) / 2.0;
        // FIX: guard BOTH semi-axes. The original only checked `a`; a degenerate
        // minor axis (b == 0) could produce 0/0 = NaN, and since `NaN > MaxFitError`
        // is false, invalid ellipses silently passed the fit-error filter.
        if (a < 1e-6 || b < 1e-6) return double.MaxValue;
        double angleRad = ellipse.Angle * Math.PI / 180.0;
        double cosA = Math.Cos(angleRad), sinA = Math.Sin(angleRad);
        double totalError = 0;
        foreach (var pt in contourPoints)
        {
            // Transform the point into the ellipse-aligned local frame.
            double dx = pt.X - cx, dy = pt.Y - cy;
            double localX = dx * cosA + dy * sinA;
            double localY = -dx * sinA + dy * cosA;
            // Implicit ellipse equation; exactly 1.0 means the point lies on the ellipse.
            double ellipseVal = (localX * localX) / (a * a) + (localY * localY) / (b * b);
            totalError += Math.Abs(Math.Sqrt(ellipseVal) - 1.0) * Math.Sqrt(a * b);
        }
        return totalError / contourPoints.Length;
    }
}
/// <summary>
/// Ellipse detection processor: registers tunable parameters, builds an
/// optional polygon ROI mask from UI-injected coordinates, and delegates to
/// <see cref="EllipseDetector"/>. Results are published through OutputData
/// ("Ellipses", "EllipseCount", "Thickness").
/// </summary>
public class EllipseDetectionProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<EllipseDetectionProcessor>();

    public EllipseDetectionProcessor()
    {
        Name = LocalizationHelper.GetString("EllipseDetectionProcessor_Name");
        Description = LocalizationHelper.GetString("EllipseDetectionProcessor_Description");
    }

    /// <summary>
    /// Registers the detector's visible tuning parameters plus hidden polygon-ROI
    /// slots (PolyCount / PolyX{i} / PolyY{i}) that the UI injects.
    /// </summary>
    protected override void InitializeParameters()
    {
        // ── Polygon ROI (coordinates injected by the UI; hidden from the parameter panel) ──
        Parameters.Add("PolyCount", new ProcessorParameter("PolyCount", "PolyCount", typeof(int), 0, null, null, "") { IsVisible = false });
        for (int i = 0; i < 32; i++)
        {
            Parameters.Add($"PolyX{i}", new ProcessorParameter($"PolyX{i}", $"PolyX{i}", typeof(int), 0, null, null, "") { IsVisible = false });
            Parameters.Add($"PolyY{i}", new ProcessorParameter($"PolyY{i}", $"PolyY{i}", typeof(int), 0, null, null, "") { IsVisible = false });
        }
        Parameters.Add("MinThreshold", new ProcessorParameter(
            "MinThreshold", LocalizationHelper.GetString("EllipseDetectionProcessor_MinThreshold"),
            typeof(int), 64, 0, 255,
            LocalizationHelper.GetString("EllipseDetectionProcessor_MinThreshold_Desc")));
        Parameters.Add("MaxThreshold", new ProcessorParameter(
            "MaxThreshold", LocalizationHelper.GetString("EllipseDetectionProcessor_MaxThreshold"),
            typeof(int), 192, 0, 255,
            LocalizationHelper.GetString("EllipseDetectionProcessor_MaxThreshold_Desc")));
        Parameters.Add("UseOtsu", new ProcessorParameter(
            "UseOtsu", LocalizationHelper.GetString("EllipseDetectionProcessor_UseOtsu"),
            typeof(bool), false, null, null,
            LocalizationHelper.GetString("EllipseDetectionProcessor_UseOtsu_Desc")));
        Parameters.Add("MinContourPoints", new ProcessorParameter(
            "MinContourPoints", LocalizationHelper.GetString("EllipseDetectionProcessor_MinContourPoints"),
            typeof(int), 30, 5, 1000,
            LocalizationHelper.GetString("EllipseDetectionProcessor_MinContourPoints_Desc")));
        Parameters.Add("MinArea", new ProcessorParameter(
            "MinArea", LocalizationHelper.GetString("EllipseDetectionProcessor_MinArea"),
            typeof(double), 100.0, 0.0, 1000000.0,
            LocalizationHelper.GetString("EllipseDetectionProcessor_MinArea_Desc")));
        Parameters.Add("MaxArea", new ProcessorParameter(
            "MaxArea", LocalizationHelper.GetString("EllipseDetectionProcessor_MaxArea"),
            typeof(double), 1000000.0, 0.0, 10000000.0,
            LocalizationHelper.GetString("EllipseDetectionProcessor_MaxArea_Desc")));
        Parameters.Add("MaxEccentricity", new ProcessorParameter(
            "MaxEccentricity", LocalizationHelper.GetString("EllipseDetectionProcessor_MaxEccentricity"),
            typeof(double), 0.95, 0.0, 1.0,
            LocalizationHelper.GetString("EllipseDetectionProcessor_MaxEccentricity_Desc")));
        Parameters.Add("MaxFitError", new ProcessorParameter(
            "MaxFitError", LocalizationHelper.GetString("EllipseDetectionProcessor_MaxFitError"),
            typeof(double), 5.0, 0.0, 50.0,
            LocalizationHelper.GetString("EllipseDetectionProcessor_MaxFitError_Desc")));
        Parameters.Add("Thickness", new ProcessorParameter(
            "Thickness", LocalizationHelper.GetString("EllipseDetectionProcessor_Thickness"),
            typeof(int), 2, 1, 10,
            LocalizationHelper.GetString("EllipseDetectionProcessor_Thickness_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Runs ellipse detection on the input image, restricted to the polygon ROI
    /// when the UI supplied at least 3 polygon points.
    /// </summary>
    /// <param name="inputImage">Grayscale source image; not modified.</param>
    /// <returns>A clone of <paramref name="inputImage"/>; results go to OutputData.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int thickness = GetParameter<int>("Thickness");
        _logger.Debug("Ellipse detection started");
        OutputData.Clear();
        int polyCount = GetParameter<int>("PolyCount");
        Image<Gray, byte>? roiMask = null;
        try
        {
            // Build the polygon ROI mask when the UI supplied a valid polygon.
            if (polyCount >= 3)
            {
                var pts = new Point[polyCount];
                for (int i = 0; i < polyCount; i++)
                    pts[i] = new Point(GetParameter<int>($"PolyX{i}"), GetParameter<int>($"PolyY{i}"));
                roiMask = new Image<Gray, byte>(inputImage.Width, inputImage.Height);
                using var vop = new VectorOfPoint(pts);
                using var vvop = new VectorOfVectorOfPoint(vop);
                CvInvoke.DrawContours(roiMask, vvop, 0, new MCvScalar(255), -1);
            }
            var detector = new EllipseDetector
            {
                MinThreshold = GetParameter<int>("MinThreshold"),
                MaxThreshold = GetParameter<int>("MaxThreshold"),
                UseOtsu = GetParameter<bool>("UseOtsu"),
                MinContourPoints = GetParameter<int>("MinContourPoints"),
                MinArea = GetParameter<double>("MinArea"),
                MaxArea = GetParameter<double>("MaxArea"),
                MaxEccentricity = GetParameter<double>("MaxEccentricity"),
                MaxFitError = GetParameter<double>("MaxFitError"),
                Thickness = thickness
            };
            var ellipses = detector.Detect(inputImage, roiMask);
            OutputData["Ellipses"] = ellipses;
            OutputData["EllipseCount"] = ellipses.Count;
            OutputData["Thickness"] = thickness;
            _logger.Information("Ellipse detection completed: detected {Count} ellipses", ellipses.Count);
            return inputImage.Clone();
        }
        finally
        {
            // Disposed on all paths; the original leaked the mask if Detect threw.
            roiMask?.Dispose();
        }
    }
}
@@ -0,0 +1,133 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: FillRateProcessor.cs
// Description: Through-hole solder fill-rate measurement processor
//              (tilted-projection geometric method), based on four ellipse ROIs.
// Features:
//   - The sample is placed at a tilt (~45°); the fill rate is derived from the
//     projected displacement relationship.
//   - Four ellipses define:
//       E1 = through-hole bottom contour
//       E2 = through-hole top contour
//       E3 = solder-fill start (coincides with E1, i.e. 0% fill)
//       E4 = solder-fill end (height the solder actually reached)
//   - Fill rate = |E4 center - E3 center| / |E2 center - E1 center| × 100%
//   - Purely geometric; does not depend on gray-level analysis.
//   - IPC-610 THT classification (Class 1/2/3).
// Algorithm: tilted-projection displacement ratio
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Through-hole solder fill-rate measurement processor (tilted-projection
/// geometric method). Four UI-injected ellipses (E1 bottom, E2 top,
/// E3 fill start, E4 fill end) define the measurement; the fill rate is the
/// ratio of the E3→E4 center displacement to the E1→E2 center displacement.
/// </summary>
public class FillRateProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<FillRateProcessor>();
/// <summary>Initializes the processor with its localized name and description.</summary>
public FillRateProcessor()
{
Name = LocalizationHelper.GetString("FillRateProcessor_Name");
Description = LocalizationHelper.GetString("FillRateProcessor_Description");
}
/// <summary>
/// Registers the four ellipse ROIs (hidden; injected by the interactive
/// control) plus the visible THT limit and drawing-thickness parameters.
/// </summary>
protected override void InitializeParameters()
{
// Four ellipses (injected by the interactive control; not visible in the UI parameter panel).
AddEllipseParams("E1", 200, 250, 60, 50, 0); // E1: through-hole bottom contour
AddEllipseParams("E2", 220, 180, 60, 50, 0); // E2: through-hole top contour
AddEllipseParams("E3", 200, 250, 60, 50, 0); // E3: solder-fill start (= E1)
AddEllipseParams("E4", 210, 220, 55, 45, 0); // E4: solder-fill end
Parameters.Add("THTLimit", new ProcessorParameter(
"THTLimit",
LocalizationHelper.GetString("FillRateProcessor_THTLimit"),
typeof(double), 75.0, 0.0, 100.0,
LocalizationHelper.GetString("FillRateProcessor_THTLimit_Desc")));
Parameters.Add("Thickness", new ProcessorParameter(
"Thickness",
LocalizationHelper.GetString("FillRateProcessor_Thickness"),
typeof(int), 2, 1, 10,
LocalizationHelper.GetString("FillRateProcessor_Thickness_Desc")));
}
/// <summary>
/// Registers the five hidden parameters (center X/Y, semi-axes A/B, angle)
/// that describe one ellipse ROI.
/// </summary>
private void AddEllipseParams(string prefix, int cx, int cy, double a, double b, double angle)
{
Parameters.Add($"{prefix}_CX", new ProcessorParameter($"{prefix}_CX", $"{prefix}_CX", typeof(int), cx, null, null, "") { IsVisible = false });
Parameters.Add($"{prefix}_CY", new ProcessorParameter($"{prefix}_CY", $"{prefix}_CY", typeof(int), cy, null, null, "") { IsVisible = false });
Parameters.Add($"{prefix}_A", new ProcessorParameter($"{prefix}_A", $"{prefix}_A", typeof(double), a, null, null, "") { IsVisible = false });
Parameters.Add($"{prefix}_B", new ProcessorParameter($"{prefix}_B", $"{prefix}_B", typeof(double), b, null, null, "") { IsVisible = false });
Parameters.Add($"{prefix}_Angle", new ProcessorParameter($"{prefix}_Angle", $"{prefix}_Angle", typeof(double), angle, null, null, "") { IsVisible = false });
}
/// <summary>
/// Computes the fill rate purely from the four ellipse centers (no gray-level
/// analysis) and publishes the results via OutputData. The image itself is
/// only cloned, never modified.
/// </summary>
/// <param name="inputImage">Grayscale source image; not modified.</param>
/// <returns>A clone of <paramref name="inputImage"/>.</returns>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
double thtLimit = GetParameter<double>("THTLimit");
int thickness = GetParameter<int>("Thickness");
// Read the four ellipse centers.
int e1cx = GetParameter<int>("E1_CX"), e1cy = GetParameter<int>("E1_CY");
int e2cx = GetParameter<int>("E2_CX"), e2cy = GetParameter<int>("E2_CY");
int e3cx = GetParameter<int>("E3_CX"), e3cy = GetParameter<int>("E3_CY");
int e4cx = GetParameter<int>("E4_CX"), e4cy = GetParameter<int>("E4_CY");
// Read the ellipse axis parameters (used for drawing only).
double e1a = GetParameter<double>("E1_A"), e1b = GetParameter<double>("E1_B"), e1ang = GetParameter<double>("E1_Angle");
double e2a = GetParameter<double>("E2_A"), e2b = GetParameter<double>("E2_B"), e2ang = GetParameter<double>("E2_Angle");
double e3a = GetParameter<double>("E3_A"), e3b = GetParameter<double>("E3_B"), e3ang = GetParameter<double>("E3_Angle");
double e4a = GetParameter<double>("E4_A"), e4b = GetParameter<double>("E4_B"), e4ang = GetParameter<double>("E4_Angle");
_logger.Debug("FillRate: E1=({E1X},{E1Y}), E2=({E2X},{E2Y}), E3=({E3X},{E3Y}), E4=({E4X},{E4Y})",
e1cx, e1cy, e2cx, e2cy, e3cx, e3cy, e4cx, e4cy);
OutputData.Clear();
// Projected displacement across the full hole height (E1 bottom -> E2 top).
double fullDx = e2cx - e1cx;
double fullDy = e2cy - e1cy;
double fullDistance = Math.Sqrt(fullDx * fullDx + fullDy * fullDy);
// Projected displacement of the solder fill (E3 start -> E4 end).
double fillDx = e4cx - e3cx;
double fillDy = e4cy - e3cy;
double fillDistance = Math.Sqrt(fillDx * fillDx + fillDy * fillDy);
// Fill rate = fill displacement / full-height displacement.
double fillRate = fullDistance > 0 ? (fillDistance / fullDistance) * 100.0 : 0;
fillRate = Math.Clamp(fillRate, 0, 100);
// Pass/fail classification against the configured THT limit.
string classification = fillRate >= thtLimit ? "PASS" : "FAIL";
// Store the measurement results.
OutputData["FillRateResult"] = true;
OutputData["FillRate"] = fillRate;
OutputData["VoidRate"] = 100.0 - fillRate;
OutputData["FullDistance"] = fullDistance;
OutputData["FillDistance"] = fillDistance;
OutputData["THTLimit"] = thtLimit;
OutputData["Classification"] = classification;
OutputData["Thickness"] = thickness;
// Ellipse geometry (consumed by renderers for drawing).
OutputData["E1"] = (new Point(e1cx, e1cy), new Size((int)e1a, (int)e1b), e1ang);
OutputData["E2"] = (new Point(e2cx, e2cy), new Size((int)e2a, (int)e2b), e2ang);
OutputData["E3"] = (new Point(e3cx, e3cy), new Size((int)e3a, (int)e3b), e3ang);
OutputData["E4"] = (new Point(e4cx, e4cy), new Size((int)e4a, (int)e4b), e4ang);
string resultText = $"{fillRate:F1}% | {classification}";
OutputData["ResultText"] = resultText;
_logger.Information("FillRate (geometric): {Rate}%, {Class}, FullDist={FD:F1}, FillDist={FiD:F1}",
fillRate, classification, fullDistance, fillDistance);
return inputImage.Clone();
}
}
@@ -0,0 +1,149 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: LineMeasurementProcessor.cs
// Description: Line measurement processor; measures the distance between two
//              points in an image.
// Features:
//   - User specifies two point coordinates (pixel coordinates).
//   - Computes the Euclidean distance between them (in pixels).
//   - Supports pixel-size calibration to output a physical distance.
//   - Draws the measurement line and annotation on the image.
//   - Publishes the measurement result for downstream processing.
// Algorithm: Euclidean distance
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Line measurement processor — measures the distance between two points.
/// Coordinates are injected by the UI (hidden parameters); the result is
/// published via OutputData and the input image is returned unmodified.
/// </summary>
public class LineMeasurementProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<LineMeasurementProcessor>();
/// <summary>Initializes the processor with its localized name and description.</summary>
public LineMeasurementProcessor()
{
Name = LocalizationHelper.GetString("LineMeasurementProcessor_Name");
Description = LocalizationHelper.GetString("LineMeasurementProcessor_Description");
}
/// <summary>
/// Registers the two endpoint coordinates (hidden; UI-injected), the pixel-size
/// calibration, the display unit, and the drawing options.
/// </summary>
protected override void InitializeParameters()
{
Parameters.Add("X1", new ProcessorParameter(
"X1",
LocalizationHelper.GetString("LineMeasurementProcessor_X1"),
typeof(int), 100, null, null,
LocalizationHelper.GetString("LineMeasurementProcessor_X1_Desc"))
{ IsVisible = false });
Parameters.Add("Y1", new ProcessorParameter(
"Y1",
LocalizationHelper.GetString("LineMeasurementProcessor_Y1"),
typeof(int), 100, null, null,
LocalizationHelper.GetString("LineMeasurementProcessor_Y1_Desc"))
{ IsVisible = false });
Parameters.Add("X2", new ProcessorParameter(
"X2",
LocalizationHelper.GetString("LineMeasurementProcessor_X2"),
typeof(int), 400, null, null,
LocalizationHelper.GetString("LineMeasurementProcessor_X2_Desc"))
{ IsVisible = false });
Parameters.Add("Y2", new ProcessorParameter(
"Y2",
LocalizationHelper.GetString("LineMeasurementProcessor_Y2"),
typeof(int), 400, null, null,
LocalizationHelper.GetString("LineMeasurementProcessor_Y2_Desc"))
{ IsVisible = false });
Parameters.Add("PixelSize", new ProcessorParameter(
"PixelSize",
LocalizationHelper.GetString("LineMeasurementProcessor_PixelSize"),
typeof(double), 1.0, null, null,
LocalizationHelper.GetString("LineMeasurementProcessor_PixelSize_Desc")));
Parameters.Add("Unit", new ProcessorParameter(
"Unit",
LocalizationHelper.GetString("LineMeasurementProcessor_Unit"),
typeof(string), "px", null, null,
LocalizationHelper.GetString("LineMeasurementProcessor_Unit_Desc"),
new string[] { "px", "mm", "μm", "cm" }));
Parameters.Add("Thickness", new ProcessorParameter(
"Thickness",
LocalizationHelper.GetString("LineMeasurementProcessor_Thickness"),
typeof(int), 2, 1, 10,
LocalizationHelper.GetString("LineMeasurementProcessor_Thickness_Desc")));
Parameters.Add("ShowLabel", new ProcessorParameter(
"ShowLabel",
LocalizationHelper.GetString("LineMeasurementProcessor_ShowLabel"),
typeof(bool), true, null, null,
LocalizationHelper.GetString("LineMeasurementProcessor_ShowLabel_Desc")));
}
/// <summary>
/// Computes the Euclidean distance and angle between the two configured
/// points (clamped to the image bounds) and publishes the results through
/// OutputData. Drawing is left to downstream renderers.
/// </summary>
/// <param name="inputImage">Grayscale source image; not modified.</param>
/// <returns>A clone of <paramref name="inputImage"/>.</returns>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
int x1 = GetParameter<int>("X1");
int y1 = GetParameter<int>("Y1");
int x2 = GetParameter<int>("X2");
int y2 = GetParameter<int>("Y2");
double pixelSize = GetParameter<double>("PixelSize");
string unit = GetParameter<string>("Unit");
int thickness = GetParameter<int>("Thickness");
bool showLabel = GetParameter<bool>("ShowLabel");
_logger.Debug("LineMeasurement: ({X1},{Y1}) -> ({X2},{Y2}), PixelSize={PixelSize}, Unit={Unit}",
x1, y1, x2, y2, pixelSize, unit);
OutputData.Clear();
// Clamp the coordinates to the image bounds.
x1 = Math.Clamp(x1, 0, inputImage.Width - 1);
y1 = Math.Clamp(y1, 0, inputImage.Height - 1);
x2 = Math.Clamp(x2, 0, inputImage.Width - 1);
y2 = Math.Clamp(y2, 0, inputImage.Height - 1);
// Euclidean distance in pixels.
double dx = x2 - x1;
double dy = y2 - y1;
double pixelDistance = Math.Sqrt(dx * dx + dy * dy);
// Physical distance via the pixel-size calibration.
double actualDistance = pixelDistance * pixelSize;
// Angle relative to the horizontal axis.
double angleRad = Math.Atan2(dy, dx);
double angleDeg = angleRad * 180.0 / Math.PI;
// Store the measurement results.
OutputData["MeasurementType"] = "Line";
OutputData["Point1"] = new Point(x1, y1);
OutputData["Point2"] = new Point(x2, y2);
OutputData["PixelDistance"] = pixelDistance;
OutputData["ActualDistance"] = actualDistance;
OutputData["Unit"] = unit;
OutputData["Angle"] = angleDeg;
OutputData["Thickness"] = thickness;
OutputData["ShowLabel"] = showLabel;
// Build the measurement label text.
string distanceText = unit == "px"
? $"{pixelDistance:F2} px"
: $"{actualDistance:F4} {unit} ({pixelDistance:F2} px)";
OutputData["MeasurementText"] = distanceText;
_logger.Information("LineMeasurement completed: Distance={Distance}, Angle={Angle:F2}°",
distanceText, angleDeg);
return inputImage.Clone();
}
}
@@ -0,0 +1,115 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: PointToLineProcessor.cs
// Description: Point-to-line distance measurement processor.
// Features:
//   - User defines a line (two endpoints) and a measurement point.
//   - Computes the perpendicular distance from the point to the line.
//   - Supports pixel-size calibration to output a physical distance.
//   - Draws the line, measurement point, foot point and distance annotation.
// Algorithm: point-to-line distance formula
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Point-to-line distance measurement processor: computes the perpendicular
/// distance from a measurement point to a line defined by two endpoints
/// (all injected by the interactive control), plus the foot of the
/// perpendicular, and publishes the results via OutputData.
/// </summary>
public class PointToLineProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<PointToLineProcessor>();

    public PointToLineProcessor()
    {
        Name = LocalizationHelper.GetString("PointToLineProcessor_Name");
        Description = LocalizationHelper.GetString("PointToLineProcessor_Description");
    }

    /// <summary>
    /// Registers the line endpoints and measurement point (hidden; injected by
    /// the interactive control) plus calibration, unit and thickness parameters.
    /// </summary>
    protected override void InitializeParameters()
    {
        // Line endpoints + measurement point (injected by the interactive control).
        Parameters.Add("L1X", new ProcessorParameter("L1X", "L1X", typeof(int), 100, null, null, "") { IsVisible = false });
        Parameters.Add("L1Y", new ProcessorParameter("L1Y", "L1Y", typeof(int), 200, null, null, "") { IsVisible = false });
        Parameters.Add("L2X", new ProcessorParameter("L2X", "L2X", typeof(int), 400, null, null, "") { IsVisible = false });
        Parameters.Add("L2Y", new ProcessorParameter("L2Y", "L2Y", typeof(int), 200, null, null, "") { IsVisible = false });
        Parameters.Add("PX", new ProcessorParameter("PX", "PX", typeof(int), 250, null, null, "") { IsVisible = false });
        Parameters.Add("PY", new ProcessorParameter("PY", "PY", typeof(int), 100, null, null, "") { IsVisible = false });
        Parameters.Add("PixelSize", new ProcessorParameter(
            "PixelSize",
            LocalizationHelper.GetString("PointToLineProcessor_PixelSize"),
            typeof(double), 1.0, null, null,
            LocalizationHelper.GetString("PointToLineProcessor_PixelSize_Desc")));
        Parameters.Add("Unit", new ProcessorParameter(
            "Unit",
            LocalizationHelper.GetString("PointToLineProcessor_Unit"),
            typeof(string), "px", null, null,
            LocalizationHelper.GetString("PointToLineProcessor_Unit_Desc"),
            new string[] { "px", "mm", "μm", "cm" }));
        Parameters.Add("Thickness", new ProcessorParameter(
            "Thickness",
            LocalizationHelper.GetString("PointToLineProcessor_Thickness"),
            typeof(int), 2, 1, 10,
            LocalizationHelper.GetString("PointToLineProcessor_Thickness_Desc")));
    }

    /// <summary>
    /// Computes the perpendicular distance and the foot point, converts to the
    /// configured unit, and publishes the results via OutputData. The image is
    /// returned as an unmodified clone.
    /// </summary>
    /// <param name="inputImage">Grayscale source image; not modified.</param>
    /// <returns>A clone of <paramref name="inputImage"/>.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int l1x = GetParameter<int>("L1X"), l1y = GetParameter<int>("L1Y");
        int l2x = GetParameter<int>("L2X"), l2y = GetParameter<int>("L2Y");
        int px = GetParameter<int>("PX"), py = GetParameter<int>("PY");
        double pixelSize = GetParameter<double>("PixelSize");
        string unit = GetParameter<string>("Unit");
        int thickness = GetParameter<int>("Thickness");
        OutputData.Clear();
        // Point-to-line distance: |AB x AP| / |AB|.
        double abx = l2x - l1x, aby = l2y - l1y;
        double abLen = Math.Sqrt(abx * abx + aby * aby);
        double pixelDistance = 0;
        int footX = px, footY = py;
        if (abLen > 0.001)   // degenerate line (coincident endpoints) -> distance 0, foot = point
        {
            // Cross-product magnitude gives the perpendicular distance.
            double cross = Math.Abs(abx * (l1y - py) - aby * (l1x - px));
            pixelDistance = cross / abLen;
            // Foot of the perpendicular: projection parameter t = AP·AB / |AB|².
            double apx = px - l1x, apy = py - l1y;
            double t = (apx * abx + apy * aby) / (abLen * abLen);
            // FIX: round to the nearest pixel. The original cast truncated toward
            // zero, biasing the reported foot point by up to one pixel.
            footX = (int)Math.Round(l1x + t * abx);
            footY = (int)Math.Round(l1y + t * aby);
            OutputData["ProjectionT"] = t;
        }
        double actualDistance = pixelDistance * pixelSize;
        string distanceText = unit == "px"
            ? $"{pixelDistance:F2} px"
            : $"{actualDistance:F4} {unit} ({pixelDistance:F2} px)";
        OutputData["PointToLineResult"] = true;
        OutputData["Line1"] = new Point(l1x, l1y);
        OutputData["Line2"] = new Point(l2x, l2y);
        OutputData["MeasurePoint"] = new Point(px, py);
        OutputData["FootPoint"] = new Point(footX, footY);
        OutputData["PixelDistance"] = pixelDistance;
        OutputData["ActualDistance"] = actualDistance;
        OutputData["Unit"] = unit;
        OutputData["Thickness"] = thickness;
        OutputData["DistanceText"] = distanceText;
        _logger.Information("PointToLine: Distance={Dist}, Foot=({FX},{FY})", distanceText, footX, footY);
        return inputImage.Clone();
    }
}
@@ -0,0 +1,230 @@
// ============================================================================
// File: VoidMeasurementProcessor.cs
// Description: Void (bubble) measurement processor.
//
// Processing flow:
//   1. Build a polygon ROI mask and compute the ROI area.
//   2. Dual-threshold segmentation inside the ROI to extract bubble regions.
//   3. Morphological dilation to merge adjacent bubbles.
//   4. Contour detection; compute the area of each bubble.
//   5. Void rate = total bubble area / ROI area.
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
public class VoidMeasurementProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<VoidMeasurementProcessor>();
/// <summary>Initializes the processor with its localized name and description.</summary>
public VoidMeasurementProcessor()
{
Name = LocalizationHelper.GetString("VoidMeasurementProcessor_Name");
Description = LocalizationHelper.GetString("VoidMeasurementProcessor_Description");
}
/// <summary>
/// Registers the hidden polygon-ROI slots (PolyCount / PolyX{i} / PolyY{i},
/// injected by the UI) plus the visible void-detection parameters.
/// </summary>
protected override void InitializeParameters()
{
// ── Polygon ROI (coordinates injected by the UI; hidden from the parameter panel) ──
Parameters.Add("PolyCount", new ProcessorParameter("PolyCount", "PolyCount", typeof(int), 0, null, null, "") { IsVisible = false });
for (int i = 0; i < 32; i++)
{
Parameters.Add($"PolyX{i}", new ProcessorParameter($"PolyX{i}", $"PolyX{i}", typeof(int), 0, null, null, "") { IsVisible = false });
Parameters.Add($"PolyY{i}", new ProcessorParameter($"PolyY{i}", $"PolyY{i}", typeof(int), 0, null, null, "") { IsVisible = false });
}
// ── Void (bubble) detection parameters ──
Parameters.Add("MinThreshold", new ProcessorParameter(
"MinThreshold",
LocalizationHelper.GetString("VoidMeasurementProcessor_MinThreshold"),
typeof(int), 128, 0, 255,
LocalizationHelper.GetString("VoidMeasurementProcessor_MinThreshold_Desc")));
Parameters.Add("MaxThreshold", new ProcessorParameter(
"MaxThreshold",
LocalizationHelper.GetString("VoidMeasurementProcessor_MaxThreshold"),
typeof(int), 255, 0, 255,
LocalizationHelper.GetString("VoidMeasurementProcessor_MaxThreshold_Desc")));
Parameters.Add("MinVoidArea", new ProcessorParameter(
"MinVoidArea",
LocalizationHelper.GetString("VoidMeasurementProcessor_MinVoidArea"),
typeof(int), 10, 1, 100000,
LocalizationHelper.GetString("VoidMeasurementProcessor_MinVoidArea_Desc")));
Parameters.Add("MergeRadius", new ProcessorParameter(
"MergeRadius",
LocalizationHelper.GetString("VoidMeasurementProcessor_MergeRadius"),
typeof(int), 3, 0, 30,
LocalizationHelper.GetString("VoidMeasurementProcessor_MergeRadius_Desc")));
Parameters.Add("BlurSize", new ProcessorParameter(
"BlurSize",
LocalizationHelper.GetString("VoidMeasurementProcessor_BlurSize"),
typeof(int), 3, 1, 31,
LocalizationHelper.GetString("VoidMeasurementProcessor_BlurSize_Desc")));
Parameters.Add("VoidLimit", new ProcessorParameter(
"VoidLimit",
LocalizationHelper.GetString("VoidMeasurementProcessor_VoidLimit"),
typeof(double), 25.0, 0.0, 100.0,
LocalizationHelper.GetString("VoidMeasurementProcessor_VoidLimit_Desc")));
}
/// <summary>
/// Segments bright void (bubble) regions inside an optional polygonal ROI,
/// measures their areas, and classifies the result PASS/FAIL against the
/// VoidLimit parameter. The input pixels are not modified; a clone of the
/// input is returned and all measurements are published via <c>OutputData</c>.
/// </summary>
/// <param name="inputImage">8-bit grayscale source image.</param>
/// <returns>A clone of the input image (this processor only measures).</returns>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
    int minThresh = GetParameter<int>("MinThreshold");
    int maxThresh = GetParameter<int>("MaxThreshold");
    int minVoidArea = GetParameter<int>("MinVoidArea");
    int mergeRadius = GetParameter<int>("MergeRadius");
    int blurSize = GetParameter<int>("BlurSize");
    double voidLimit = GetParameter<double>("VoidLimit");
    // Gaussian kernels must have an odd size.
    if (blurSize % 2 == 0) blurSize++;
    OutputData.Clear();
    int w = inputImage.Width, h = inputImage.Height;
    // ── Build the polygonal ROI mask ──
    int polyCount = GetParameter<int>("PolyCount");
    Image<Gray, byte>? roiMask = null;
    Point[]? roiPoints = null;
    if (polyCount >= 3)
    {
        roiPoints = new Point[polyCount];
        for (int i = 0; i < polyCount; i++)
            roiPoints[i] = new Point(GetParameter<int>($"PolyX{i}"), GetParameter<int>($"PolyY{i}"));
        roiMask = new Image<Gray, byte>(w, h);
        using var vop = new VectorOfPoint(roiPoints);
        using var vvop = new VectorOfVectorOfPoint(vop);
        // Thickness -1 fills the polygon, producing the measurement region.
        CvInvoke.DrawContours(roiMask, vvop, 0, new MCvScalar(255), -1);
    }
    else
    {
        // Fewer than 3 polygon points: fall back to the whole image as the ROI.
        roiMask = new Image<Gray, byte>(w, h);
        roiMask.SetValue(new Gray(255));
    }
    int roiArea = CvInvoke.CountNonZero(roiMask);
    _logger.Debug("VoidMeasurement: ROI area={Area}, Thresh=[{Min},{Max}], MergeR={MR}",
        roiArea, minThresh, maxThresh, mergeRadius);
    // ── Gaussian blur for denoising ──
    var blurred = new Image<Gray, byte>(w, h);
    CvInvoke.GaussianBlur(inputImage, blurred, new Size(blurSize, blurSize), 0);
    // ── Dual-threshold segmentation: extract voids (bright regions) ──
    var voidImg = new Image<Gray, byte>(w, h);
    byte[,,] srcData = blurred.Data;
    byte[,,] dstData = voidImg.Data;
    byte[,,] maskData = roiMask.Data;
    for (int y = 0; y < h; y++)
    {
        for (int x = 0; x < w; x++)
        {
            if (maskData[y, x, 0] > 0)
            {
                byte val = srcData[y, x, 0];
                // A pixel is a void candidate when its gray level lies in [minThresh, maxThresh].
                dstData[y, x, 0] = (val >= minThresh && val <= maxThresh) ? (byte)255 : (byte)0;
            }
        }
    }
    // ── Morphological dilation merges adjacent voids ──
    if (mergeRadius > 0)
    {
        int kernelSize = mergeRadius * 2 + 1;
        using var kernel = CvInvoke.GetStructuringElement(ElementShape.Ellipse,
            new Size(kernelSize, kernelSize), new Point(-1, -1));
        CvInvoke.Dilate(voidImg, voidImg, kernel, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0));
        // Intersect with the ROI mask so dilation cannot grow outside the ROI.
        CvInvoke.BitwiseAnd(voidImg, roiMask, voidImg);
    }
    // ── Contour detection ──
    using var contours = new VectorOfVectorOfPoint();
    using var hierarchy = new Mat();
    CvInvoke.FindContours(voidImg, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
    var voids = new List<VoidRegionInfo>();
    int totalVoidArea = 0;
    for (int i = 0; i < contours.Size; i++)
    {
        double area = CvInvoke.ContourArea(contours[i]);
        // Discard speckles smaller than the minimum void area.
        if (area < minVoidArea) continue;
        var moments = CvInvoke.Moments(contours[i]);
        // Skip degenerate contours to avoid dividing by ~0 in the centroid below.
        if (moments.M00 < 1) continue;
        int intArea = (int)Math.Round(area);
        totalVoidArea += intArea;
        voids.Add(new VoidRegionInfo
        {
            Index = voids.Count + 1,
            CenterX = moments.M10 / moments.M00,
            CenterY = moments.M01 / moments.M00,
            Area = intArea,
            AreaPercent = roiArea > 0 ? area / roiArea * 100.0 : 0,
            BoundingBox = CvInvoke.BoundingRectangle(contours[i]),
            ContourPoints = contours[i].ToArray()
        });
    }
    // Sort by area, largest first, then re-number the 1-based indices.
    voids.Sort((a, b) => b.Area.CompareTo(a.Area));
    for (int i = 0; i < voids.Count; i++) voids[i].Index = i + 1;
    double voidRate = roiArea > 0 ? (double)totalVoidArea / roiArea * 100.0 : 0;
    string classification = voidRate <= voidLimit ? "PASS" : "FAIL";
    int maxVoidArea = voids.Count > 0 ? voids[0].Area : 0;
    _logger.Information("VoidMeasurement: VoidRate={Rate:F1}%, Voids={Count}, MaxArea={Max}, {Class}",
        voidRate, voids.Count, maxVoidArea, classification);
    // ── Publish measurement results ──
    OutputData["VoidMeasurementResult"] = true;
    OutputData["RoiArea"] = roiArea;
    OutputData["RoiPoints"] = roiPoints;
    OutputData["TotalVoidArea"] = totalVoidArea;
    OutputData["VoidRate"] = voidRate;
    OutputData["VoidLimit"] = voidLimit;
    OutputData["VoidCount"] = voids.Count;
    OutputData["MaxVoidArea"] = maxVoidArea;
    OutputData["Classification"] = classification;
    OutputData["Voids"] = voids;
    OutputData["ResultText"] = $"Void: {voidRate:F1}% | {classification} | {voids.Count} voids | ROI: {roiArea}px";
    blurred.Dispose();
    voidImg.Dispose();
    roiMask.Dispose();
    return inputImage.Clone();
}
}
/// <summary>
/// Measurement data for a single detected void (bubble) region.
/// </summary>
public class VoidRegionInfo
{
    /// <summary>1-based index of the void after sorting by area (largest first).</summary>
    public int Index { get; set; }
    /// <summary>X coordinate of the region centroid, in pixels.</summary>
    public double CenterX { get; set; }
    /// <summary>Y coordinate of the region centroid, in pixels.</summary>
    public double CenterY { get; set; }
    /// <summary>Region area in pixels (rounded contour area).</summary>
    public int Area { get; set; }
    /// <summary>Region area as a percentage of the ROI area.</summary>
    public double AreaPercent { get; set; }
    /// <summary>Axis-aligned bounding rectangle of the contour.</summary>
    public Rectangle BoundingBox { get; set; }
    /// <summary>Contour points outlining the region; never null.</summary>
    public Point[] ContourPoints { get; set; } = Array.Empty<Point>();
}
@@ -0,0 +1,197 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件å? BandPassFilterProcessor.cs
// æè¿°: 带通滤波器算å­ï¼Œç”¨äºŽé¢‘域图åƒå¤„ç?
// 功能:
// - 在频域中ä¿ç•™ç‰¹å®šé¢‘率范围的信å?
// - 支æŒç†æƒ³ã€å·´ç‰¹æ²ƒæ–¯ã€é«˜æ–¯ä¸‰ç§æ»¤æ³¢å™¨ç±»åž‹
// - å¯è°ƒèŠ‚ä½Žé¢‘å’Œé«˜é¢‘æˆªæ­¢é¢‘çŽ‡
// - 通过FFT实现频域滤波
// 算法: 基于离散傅里å¶å˜æ¢ï¼ˆDFT)的频域滤波
// 作è€? æŽä¼Ÿ wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Band-pass filter processor: keeps frequency components between a low and a
/// high cutoff radius, using an Ideal, Butterworth or Gaussian mask in the
/// frequency domain (via DFT).
/// </summary>
public class BandPassFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<BandPassFilterProcessor>();

    /// <summary>
    /// Creates the processor and resolves its localized name and description.
    /// </summary>
    public BandPassFilterProcessor()
    {
        Name = LocalizationHelper.GetString("BandPassFilterProcessor_Name");
        Description = LocalizationHelper.GetString("BandPassFilterProcessor_Description");
    }

    /// <summary>
    /// Registers LowCutoff (1–200), HighCutoff (2–500), FilterType
    /// (Ideal/Butterworth/Gaussian) and the Butterworth Order (1–10).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("LowCutoff", new ProcessorParameter(
            "LowCutoff",
            LocalizationHelper.GetString("BandPassFilterProcessor_LowCutoff"),
            typeof(int),
            10,
            1,
            200,
            LocalizationHelper.GetString("BandPassFilterProcessor_LowCutoff_Desc")));
        Parameters.Add("HighCutoff", new ProcessorParameter(
            "HighCutoff",
            LocalizationHelper.GetString("BandPassFilterProcessor_HighCutoff"),
            typeof(int),
            50,
            2,
            500,
            LocalizationHelper.GetString("BandPassFilterProcessor_HighCutoff_Desc")));
        Parameters.Add("FilterType", new ProcessorParameter(
            "FilterType",
            LocalizationHelper.GetString("BandPassFilterProcessor_FilterType"),
            typeof(string),
            "Ideal",
            null,
            null,
            LocalizationHelper.GetString("BandPassFilterProcessor_FilterType_Desc"),
            new string[] { "Ideal", "Butterworth", "Gaussian" }));
        Parameters.Add("Order", new ProcessorParameter(
            "Order",
            LocalizationHelper.GetString("BandPassFilterProcessor_Order"),
            typeof(int),
            2,
            1,
            10,
            LocalizationHelper.GetString("BandPassFilterProcessor_Order_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the band-pass filter: forward DFT, multiply both spectrum
    /// channels by the mask, inverse DFT, then min-max normalize to 0–255.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <returns>Filtered, normalized 8-bit image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int lowCutoff = GetParameter<int>("LowCutoff");
        int highCutoff = GetParameter<int>("HighCutoff");
        string filterType = GetParameter<string>("FilterType");
        int order = GetParameter<int>("Order");
        // Guarantee a non-empty pass band.
        if (highCutoff <= lowCutoff)
        {
            highCutoff = lowCutoff + 10;
        }
        using var floatImage = inputImage.Convert<Gray, float>();
        using var imaginaryImage = new Image<Gray, float>(floatImage.Size);
        imaginaryImage.SetZero();
        // Build a two-channel (real, imaginary) complex image for the DFT.
        using var complexMat = new Mat();
        using (var planes = new Emgu.CV.Util.VectorOfMat())
        {
            planes.Push(floatImage.Mat);
            planes.Push(imaginaryImage.Mat);
            CvInvoke.Merge(planes, complexMat);
        }
        using var dftMat = new Mat();
        CvInvoke.Dft(complexMat, dftMat, DxtType.Forward, 0);
        using var mask = CreateBandPassMask(floatImage.Size, lowCutoff, highCutoff, filterType, order);
        using var dftPlanes = new Emgu.CV.Util.VectorOfMat();
        CvInvoke.Split(dftMat, dftPlanes);
        Mat real = dftPlanes[0];
        Mat imag = dftPlanes[1];
        // Attenuate both channels by the same real-valued mask.
        CvInvoke.Multiply(real, mask.Mat, real);
        CvInvoke.Multiply(imag, mask.Mat, imag);
        using var filteredDft = new Mat();
        using (var filteredPlanes = new Emgu.CV.Util.VectorOfMat())
        {
            filteredPlanes.Push(real);
            filteredPlanes.Push(imag);
            CvInvoke.Merge(filteredPlanes, filteredDft);
        }
        using var idftMat = new Mat();
        CvInvoke.Dft(filteredDft, idftMat, DxtType.Inverse | DxtType.Scale, 0);
        using var idftPlanes = new Emgu.CV.Util.VectorOfMat();
        CvInvoke.Split(idftMat, idftPlanes);
        using var result = new Image<Gray, float>(floatImage.Size);
        idftPlanes[0].CopyTo(result);
        // Min-max normalize into the displayable 0–255 range.
        double minVal = 0, maxVal = 0;
        Point minLoc = new Point();
        Point maxLoc = new Point();
        CvInvoke.MinMaxLoc(result, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
        Image<Gray, float> normalized = maxVal > minVal
            ? (result - minVal) * (255.0 / (maxVal - minVal))
            : result.Clone();
        using (normalized)
        {
            _logger.Debug("Process: LowCutoff = {0}, HighCutoff = {1}, FilterType = {2}, Order = {3}", lowCutoff, highCutoff, filterType, order);
            return normalized.Convert<Gray, byte>();
        }
    }

    /// <summary>
    /// Builds the band-pass mask centered at the image midpoint.
    /// NOTE(review): the spectrum produced in <see cref="Process"/> is not
    /// fft-shifted (DC sits at the top-left corner) while this mask is centered;
    /// confirm whether a quadrant swap is intended before the multiply.
    /// </summary>
    private Image<Gray, float> CreateBandPassMask(Size size, int lowCutoff, int highCutoff, string filterType, int order)
    {
        var mask = new Image<Gray, float>(size);
        int cx = size.Width / 2;
        int cy = size.Height / 2;
        for (int y = 0; y < size.Height; y++)
        {
            for (int x = 0; x < size.Width; x++)
            {
                double dx = x - cx;
                double dy = y - cy;
                double distance = Math.Sqrt(dx * dx + dy * dy);
                float value = 0;
                // Fixed: the switch previously compared against localized Chinese
                // labels while the FilterType parameter stores the English option
                // values declared in InitializeParameters ("Ideal", "Butterworth",
                // "Gaussian"), so no case ever matched and the mask was all zero.
                switch (filterType)
                {
                    case "Ideal":
                        value = (distance >= lowCutoff && distance <= highCutoff) ? 1.0f : 0.0f;
                        break;
                    case "Butterworth":
                        double highPass = 1.0 / (1.0 + Math.Pow(lowCutoff / (distance + 0.001), 2 * order));
                        double lowPass = 1.0 / (1.0 + Math.Pow(distance / (highCutoff + 0.001), 2 * order));
                        value = (float)(highPass * lowPass);
                        break;
                    case "Gaussian":
                        double highPassGaussian = 1.0 - Math.Exp(-distance * distance / (2.0 * lowCutoff * lowCutoff));
                        double lowPassGaussian = Math.Exp(-distance * distance / (2.0 * highCutoff * highCutoff));
                        value = (float)(highPassGaussian * lowPassGaussian);
                        break;
                    // Unknown filter types keep the original behavior: zero mask.
                }
                mask.Data[y, x, 0] = value;
            }
        }
        return mask;
    }
}
@@ -0,0 +1,78 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件å? BilateralFilterProcessor.cs
// æè¿°: åŒè¾¹æ»¤æ³¢ç®—å­ï¼Œç”¨äºŽä¿è¾¹é™å™?
// 功能:
// - åŒè¾¹æ»¤æ³¢
// - ä¿æŒè¾¹ç¼˜æ¸…æ™°çš„åŒæ—¶å¹³æ»‘图åƒ?
// - å¯è°ƒèŠ‚æ ¸å¤§å°å’Œæ ‡å‡†å·®
// 算法: åŒè¾¹æ»¤æ³¢
// 作è€? æŽä¼Ÿ wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Bilateral filter processor: edge-preserving smoothing where both spatial
/// closeness and intensity similarity weight the kernel.
/// </summary>
public class BilateralFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<BilateralFilterProcessor>();

    /// <summary>
    /// Creates the processor and resolves its localized name and description.
    /// </summary>
    public BilateralFilterProcessor()
    {
        Name = LocalizationHelper.GetString("BilateralFilterProcessor_Name");
        Description = LocalizationHelper.GetString("BilateralFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the three tunable parameters: Diameter (pixel neighborhood,
    /// default 9), SigmaColor and SigmaSpace (both default 75.0).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Diameter", new ProcessorParameter(
            "Diameter",
            LocalizationHelper.GetString("BilateralFilterProcessor_Diameter"),
            typeof(int), 9, 1, 31,
            LocalizationHelper.GetString("BilateralFilterProcessor_Diameter_Desc")));
        Parameters.Add("SigmaColor", new ProcessorParameter(
            "SigmaColor",
            LocalizationHelper.GetString("BilateralFilterProcessor_SigmaColor"),
            typeof(double), 75.0, 1.0, 200.0,
            LocalizationHelper.GetString("BilateralFilterProcessor_SigmaColor_Desc")));
        Parameters.Add("SigmaSpace", new ProcessorParameter(
            "SigmaSpace",
            LocalizationHelper.GetString("BilateralFilterProcessor_SigmaSpace"),
            typeof(double), 75.0, 1.0, 200.0,
            LocalizationHelper.GetString("BilateralFilterProcessor_SigmaSpace_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Runs the bilateral filter over the whole image and returns the
    /// smoothed copy; the input image is left untouched.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <returns>Edge-preserved, smoothed 8-bit image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int pixelDiameter = GetParameter<int>("Diameter");
        double colorSigma = GetParameter<double>("SigmaColor");
        double spaceSigma = GetParameter<double>("SigmaSpace");

        var filtered = inputImage.Clone();
        CvInvoke.BilateralFilter(inputImage, filtered, pixelDiameter, colorSigma, spaceSigma);

        _logger.Debug("Process: Diameter = {Diameter}, SigmaColor = {SigmaColor}, SigmaSpace = {SigmaSpace}",
            pixelDiameter, colorSigma, spaceSigma);
        return filtered;
    }
}
@@ -0,0 +1,69 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件å? GaussianBlurProcessor.cs
// æè¿°: 高斯模糊算å­ï¼Œç”¨äºŽå›¾åƒå¹³æ»‘å’Œé™å™ª
// 功能:
// - 高斯核å·ç§¯å¹³æ»?
// - å¯è°ƒèŠ‚æ ¸å¤§å°å’Œæ ‡å‡†å·®
// - 有效去除高斯噪声
// - ä¿æŒè¾¹ç¼˜ç›¸å¯¹æ¸…æ™°
// 算法: 高斯滤波器å·ç§?
// 作è€? æŽä¼Ÿ wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Gaussian blur processor for image smoothing and denoising.
/// </summary>
public class GaussianBlurProcessor : ImageProcessorBase
{
    // Fixed: the logger was created with Log.ForContext<GammaProcessor>(),
    // which stamped this processor's log events with the wrong SourceContext.
    private static readonly ILogger _logger = Log.ForContext<GaussianBlurProcessor>();

    /// <summary>
    /// Creates the processor and resolves its localized name and description.
    /// </summary>
    public GaussianBlurProcessor()
    {
        Name = LocalizationHelper.GetString("GaussianBlurProcessor_Name");
        Description = LocalizationHelper.GetString("GaussianBlurProcessor_Description");
    }

    /// <summary>
    /// Registers KernelSize (default 5, range 1–31) and Sigma (default 1.5,
    /// range 0.1–10.0).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("GaussianBlurProcessor_KernelSize"),
            typeof(int),
            5,
            1,
            31,
            LocalizationHelper.GetString("GaussianBlurProcessor_KernelSize_Desc")));
        Parameters.Add("Sigma", new ProcessorParameter(
            "Sigma",
            LocalizationHelper.GetString("GaussianBlurProcessor_Sigma"),
            typeof(double),
            1.5,
            0.1,
            10.0,
            LocalizationHelper.GetString("GaussianBlurProcessor_Sigma_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies a Gaussian blur. Even kernel sizes are bumped to the next odd
    /// value, since Gaussian kernels must be odd.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <returns>Blurred 8-bit image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int kernelSize = GetParameter<int>("KernelSize");
        double sigma = GetParameter<double>("Sigma");
        if (kernelSize % 2 == 0) kernelSize++;
        var result = inputImage.Clone();
        CvInvoke.GaussianBlur(inputImage, result,
            new System.Drawing.Size(kernelSize, kernelSize), sigma);
        _logger.Debug("Process: KernelSize = {KernelSize}, Sigma = {Sigma}", kernelSize, sigma);
        return result;
    }
}
@@ -0,0 +1,148 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件å? HighPassFilterProcessor.cs
// æè¿°: 高通滤波算å­ï¼Œç”¨äºŽè¾¹ç¼˜å¢žå¼º
// 功能:
// - 高通滤波(频域�
// - 边缘增强
// - 去除低频信æ¯
// - å¯è°ƒèŠ‚æˆªæ­¢é¢‘çŽ?
// 算法: 高斯高通滤波器(频域)
// 作è€? æŽä¼Ÿ wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Frequency-domain high-pass filter (Gaussian high-pass): suppresses low
/// frequencies to enhance edges and fine detail.
/// </summary>
public class HighPassFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<HighPassFilterProcessor>();

    /// <summary>
    /// Creates the processor and resolves its localized name and description.
    /// </summary>
    public HighPassFilterProcessor()
    {
        Name = LocalizationHelper.GetString("HighPassFilterProcessor_Name");
        Description = LocalizationHelper.GetString("HighPassFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the CutoffFrequency parameter (default 30.0, range 1.0–200.0).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("CutoffFrequency", new ProcessorParameter(
            "CutoffFrequency",
            LocalizationHelper.GetString("HighPassFilterProcessor_CutoffFrequency"),
            typeof(double),
            30.0,
            1.0,
            200.0,
            LocalizationHelper.GetString("HighPassFilterProcessor_CutoffFrequency_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the high-pass filter: forward DFT, multiply by the Gaussian
    /// high-pass mask, inverse DFT, convert the real part back to 8 bits.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <returns>Filtered 8-bit image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double cutoffFrequency = GetParameter<double>("CutoffFrequency");
        int rows = inputImage.Rows;
        int cols = inputImage.Cols;
        // Convert to float for the DFT.
        Image<Gray, float> floatImage = inputImage.Convert<Gray, float>();
        // Build a two-channel (real, imaginary) complex image for the FFT.
        Mat complexImage = new Mat();
        using (var planes = new Emgu.CV.Util.VectorOfMat())
        {
            planes.Push(floatImage.Mat);
            planes.Push(Mat.Zeros(rows, cols, DepthType.Cv32F, 1));
            CvInvoke.Merge(planes, complexImage);
        }
        // Forward DFT.
        Mat dftImage = new Mat();
        CvInvoke.Dft(complexImage, dftImage, DxtType.Forward);
        // Split into real and imaginary parts.
        using (var dftPlanes = new Emgu.CV.Util.VectorOfMat())
        {
            CvInvoke.Split(dftImage, dftPlanes);
            Mat real = dftPlanes[0];
            Mat imag = dftPlanes[1];
            // Build the high-pass filter mask.
            // NOTE(review): the mask is centered at (cols/2, rows/2) while the
            // DFT output has not been fft-shifted (DC is at the top-left corner);
            // confirm whether a quadrant swap is intended before the multiply.
            Mat filter = CreateHighPassFilter(rows, cols, cutoffFrequency);
            // Apply the filter to both channels.
            CvInvoke.Multiply(real, filter, real);
            CvInvoke.Multiply(imag, filter, imag);
            // Merge and run the inverse DFT.
            using (var filteredPlanes = new Emgu.CV.Util.VectorOfMat())
            {
                filteredPlanes.Push(real);
                filteredPlanes.Push(imag);
                Mat filteredDft = new Mat();
                CvInvoke.Merge(filteredPlanes, filteredDft);
                Mat ifftImage = new Mat();
                CvInvoke.Dft(filteredDft, ifftImage, DxtType.Inverse | DxtType.Scale);
                // Keep only the real part of the result.
                using (var ifftPlanes = new Emgu.CV.Util.VectorOfMat())
                {
                    CvInvoke.Split(ifftImage, ifftPlanes);
                    // Convert back to 8-bit.
                    // NOTE(review): ConvertTo saturates — negative high-pass
                    // responses clip to 0 and values above 255 clip to 255;
                    // confirm that is the intended presentation.
                    Mat resultMat = new Mat();
                    ifftPlanes[0].ConvertTo(resultMat, DepthType.Cv8U);
                    Image<Gray, byte> result = resultMat.ToImage<Gray, byte>();
                    // Release intermediate resources.
                    floatImage.Dispose();
                    complexImage.Dispose();
                    dftImage.Dispose();
                    filter.Dispose();
                    filteredDft.Dispose();
                    ifftImage.Dispose();
                    resultMat.Dispose();
                    _logger.Debug("Process: CutoffFrequency = {CutoffFrequency}", cutoffFrequency);
                    return result;
                }
            }
        }
    }
    /// <summary>
    /// Builds a Gaussian high-pass mask, H(d) = 1 - exp(-d² / (2·d0²)),
    /// centered at the image midpoint.
    /// </summary>
    /// <param name="rows">Mask height.</param>
    /// <param name="cols">Mask width.</param>
    /// <param name="d0">Cutoff frequency (Gaussian standard deviation).</param>
    private Mat CreateHighPassFilter(int rows, int cols, double d0)
    {
        var filter = new Image<Gray, float>(cols, rows);
        int centerX = cols / 2;
        int centerY = rows / 2;
        for (int i = 0; i < rows; i++)
        {
            for (int j = 0; j < cols; j++)
            {
                double distance = Math.Sqrt(Math.Pow(i - centerY, 2) + Math.Pow(j - centerX, 2));
                float value = (float)(1 - Math.Exp(-(distance * distance) / (2 * d0 * d0)));
                filter.Data[i, j, 0] = value;
            }
        }
        return filter.Mat;
    }
}
@@ -0,0 +1,148 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件å? LowPassFilterProcessor.cs
// æè¿°: 低通滤波算å­ï¼Œç”¨äºŽåŽ»é™¤é«˜é¢‘å™ªå£°
// 功能:
// - 低通滤波(频域�
// - 去除高频噪声
// - 平滑图åƒ
// - å¯è°ƒèŠ‚æˆªæ­¢é¢‘çŽ?
// 算法: 高斯低通滤波器(频域)
// 作è€? æŽä¼Ÿ wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Frequency-domain low-pass filter (Gaussian low-pass): removes high-frequency
/// noise and smooths the image.
/// </summary>
public class LowPassFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<LowPassFilterProcessor>();

    /// <summary>
    /// Creates the processor and resolves its localized name and description.
    /// </summary>
    public LowPassFilterProcessor()
    {
        Name = LocalizationHelper.GetString("LowPassFilterProcessor_Name");
        Description = LocalizationHelper.GetString("LowPassFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the CutoffFrequency parameter (default 30.0, range 1.0–200.0).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("CutoffFrequency", new ProcessorParameter(
            "CutoffFrequency",
            LocalizationHelper.GetString("LowPassFilterProcessor_CutoffFrequency"),
            typeof(double),
            30.0,
            1.0,
            200.0,
            LocalizationHelper.GetString("LowPassFilterProcessor_CutoffFrequency_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the low-pass filter: forward DFT, multiply by the Gaussian
    /// low-pass mask, inverse DFT, convert the real part back to 8 bits.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <returns>Filtered 8-bit image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double cutoffFrequency = GetParameter<double>("CutoffFrequency");
        int rows = inputImage.Rows;
        int cols = inputImage.Cols;
        // Convert to float for the DFT.
        Image<Gray, float> floatImage = inputImage.Convert<Gray, float>();
        // Build a two-channel (real, imaginary) complex image for the FFT.
        Mat complexImage = new Mat();
        using (var planes = new Emgu.CV.Util.VectorOfMat())
        {
            planes.Push(floatImage.Mat);
            planes.Push(Mat.Zeros(rows, cols, DepthType.Cv32F, 1));
            CvInvoke.Merge(planes, complexImage);
        }
        // Forward DFT.
        Mat dftImage = new Mat();
        CvInvoke.Dft(complexImage, dftImage, DxtType.Forward);
        // Split into real and imaginary parts.
        using (var dftPlanes = new Emgu.CV.Util.VectorOfMat())
        {
            CvInvoke.Split(dftImage, dftPlanes);
            Mat real = dftPlanes[0];
            Mat imag = dftPlanes[1];
            // Build the low-pass filter mask.
            // NOTE(review): the mask is centered at (cols/2, rows/2) while the
            // DFT output has not been fft-shifted (DC is at the top-left corner);
            // confirm whether a quadrant swap is intended before the multiply.
            Mat filter = CreateLowPassFilter(rows, cols, cutoffFrequency);
            // Apply the filter to both channels.
            CvInvoke.Multiply(real, filter, real);
            CvInvoke.Multiply(imag, filter, imag);
            // Merge and run the inverse DFT.
            using (var filteredPlanes = new Emgu.CV.Util.VectorOfMat())
            {
                filteredPlanes.Push(real);
                filteredPlanes.Push(imag);
                Mat filteredDft = new Mat();
                CvInvoke.Merge(filteredPlanes, filteredDft);
                Mat ifftImage = new Mat();
                CvInvoke.Dft(filteredDft, ifftImage, DxtType.Inverse | DxtType.Scale);
                // Keep only the real part of the result.
                using (var ifftPlanes = new Emgu.CV.Util.VectorOfMat())
                {
                    CvInvoke.Split(ifftImage, ifftPlanes);
                    // Convert back to 8-bit (ConvertTo saturates to 0–255).
                    Mat resultMat = new Mat();
                    ifftPlanes[0].ConvertTo(resultMat, DepthType.Cv8U);
                    Image<Gray, byte> result = resultMat.ToImage<Gray, byte>();
                    // Release intermediate resources.
                    floatImage.Dispose();
                    complexImage.Dispose();
                    dftImage.Dispose();
                    filter.Dispose();
                    filteredDft.Dispose();
                    ifftImage.Dispose();
                    resultMat.Dispose();
                    _logger.Debug("Process: CutoffFrequency = {CutoffFrequency}", cutoffFrequency);
                    return result;
                }
            }
        }
    }
    /// <summary>
    /// Builds a Gaussian low-pass mask, H(d) = exp(-d² / (2·d0²)),
    /// centered at the image midpoint.
    /// </summary>
    /// <param name="rows">Mask height.</param>
    /// <param name="cols">Mask width.</param>
    /// <param name="d0">Cutoff frequency (Gaussian standard deviation).</param>
    private Mat CreateLowPassFilter(int rows, int cols, double d0)
    {
        var filter = new Image<Gray, float>(cols, rows);
        int centerX = cols / 2;
        int centerY = rows / 2;
        for (int i = 0; i < rows; i++)
        {
            for (int j = 0; j < cols; j++)
            {
                double distance = Math.Sqrt(Math.Pow(i - centerY, 2) + Math.Pow(j - centerX, 2));
                float value = (float)Math.Exp(-(distance * distance) / (2 * d0 * d0));
                filter.Data[i, j, 0] = value;
            }
        }
        return filter.Mat;
    }
}
@@ -0,0 +1,61 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件å? MeanFilterProcessor.cs
// æè¿°: å‡å€¼æ»¤æ³¢ç®—å­ï¼Œç”¨äºŽå›¾åƒå¹³æ»‘
// 功能:
// - å‡å€¼æ»¤æ³?
// - 简å•快速的平滑方法
// - å¯è°ƒèŠ‚æ ¸å¤§å°
// 算法: å‡å€¼æ»¤æ³?
// 作è€? æŽä¼Ÿ wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Mean (box) filter processor: a simple, fast smoothing operator.
/// </summary>
public class MeanFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MeanFilterProcessor>();

    /// <summary>
    /// Creates the processor and resolves its localized name and description.
    /// </summary>
    public MeanFilterProcessor()
    {
        Name = LocalizationHelper.GetString("MeanFilterProcessor_Name");
        Description = LocalizationHelper.GetString("MeanFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the single KernelSize parameter (default 5, range 1–31).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("MeanFilterProcessor_KernelSize"),
            typeof(int), 5, 1, 31,
            LocalizationHelper.GetString("MeanFilterProcessor_KernelSize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Averages each pixel over a square neighborhood. Even kernel sizes are
    /// bumped to the next odd value.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <returns>Smoothed 8-bit image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int boxSize = GetParameter<int>("KernelSize");
        boxSize |= 1; // force an odd kernel size (n|1 == n+1 for even n)

        var smoothed = inputImage.Clone();
        CvInvoke.Blur(inputImage, smoothed, new Size(boxSize, boxSize), new Point(-1, -1));

        _logger.Debug("Process: KernelSize = {KernelSize}", boxSize);
        return smoothed;
    }
}
@@ -0,0 +1,61 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件å? MedianFilterProcessor.cs
// æè¿°: 中值滤波算å­ï¼Œç”¨äºŽåŽ»é™¤æ¤’ç›å™ªå£°
// 功能:
// - 中值滤�
// - 有效去除椒ç›å™ªå£°
// - ä¿æŒè¾¹ç¼˜æ¸…æ™°
// - å¯è°ƒèŠ‚æ ¸å¤§å°
// 算法: 中值滤�
// 作è€? æŽä¼Ÿ wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Median filter processor: removes salt-and-pepper noise while keeping
/// edges sharp.
/// </summary>
public class MedianFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<MedianFilterProcessor>();

    /// <summary>
    /// Creates the processor and resolves its localized name and description.
    /// </summary>
    public MedianFilterProcessor()
    {
        Name = LocalizationHelper.GetString("MedianFilterProcessor_Name");
        Description = LocalizationHelper.GetString("MedianFilterProcessor_Description");
    }

    /// <summary>
    /// Registers the single KernelSize parameter (default 5, range 1–31).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("MedianFilterProcessor_KernelSize"),
            typeof(int), 5, 1, 31,
            LocalizationHelper.GetString("MedianFilterProcessor_KernelSize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Replaces each pixel with the median of its neighborhood. Even kernel
    /// sizes are bumped to the next odd value, as MedianBlur requires.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <returns>Denoised 8-bit image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int aperture = GetParameter<int>("KernelSize");
        aperture |= 1; // force an odd aperture (n|1 == n+1 for even n)

        var denoised = inputImage.Clone();
        CvInvoke.MedianBlur(inputImage, denoised, aperture);

        _logger.Debug("Process: KernelSize = {KernelSize}", aperture);
        return denoised;
    }
}
@@ -0,0 +1,123 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件å? ShockFilterProcessor.cs
// æè¿°: 冲击滤波算å­ï¼Œç”¨äºŽå›¾åƒé”化和边缘增强
// 功能:
// - 基于PDE的图åƒé”åŒ?
// - å¢žå¼ºè¾¹ç¼˜åŒæ—¶ä¿æŒå¹³æ»‘区域
// - å¯è°ƒèŠ‚è¿­ä»£æ¬¡æ•°å’Œæ»¤æ³¢å¼ºåº¦
// - 适用于模糊图åƒçš„æ¢å¤
// 算法: 冲击滤波器(Shock Filter)基于å微分方程
// 作è€? æŽä¼Ÿ wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Shock filter processor: PDE-based sharpening that pushes pixel values
/// toward edges, restoring crispness in blurred images while leaving flat
/// regions untouched.
/// </summary>
public class ShockFilterProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ShockFilterProcessor>();

    /// <summary>
    /// Creates the processor and resolves its localized name and description.
    /// </summary>
    public ShockFilterProcessor()
    {
        Name = LocalizationHelper.GetString("ShockFilterProcessor_Name");
        Description = LocalizationHelper.GetString("ShockFilterProcessor_Description");
    }

    /// <summary>
    /// Registers Iterations (default 5), Theta — the gradient-magnitude
    /// threshold below which a pixel is left unchanged (default 0.5) — and
    /// Dt, the explicit time step (default 0.25).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Iterations", new ProcessorParameter(
            "Iterations",
            LocalizationHelper.GetString("ShockFilterProcessor_Iterations"),
            typeof(int),
            5,
            1,
            20,
            LocalizationHelper.GetString("ShockFilterProcessor_Iterations_Desc")));
        Parameters.Add("Theta", new ProcessorParameter(
            "Theta",
            LocalizationHelper.GetString("ShockFilterProcessor_Theta"),
            typeof(double),
            0.5,
            0.0,
            2.0,
            LocalizationHelper.GetString("ShockFilterProcessor_Theta_Desc")));
        Parameters.Add("Dt", new ProcessorParameter(
            "Dt",
            LocalizationHelper.GetString("ShockFilterProcessor_Dt"),
            typeof(double),
            0.25,
            0.1,
            1.0,
            LocalizationHelper.GetString("ShockFilterProcessor_Dt_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Runs the configured number of shock-filter iterations and converts the
    /// result back to 8 bits.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <returns>Sharpened 8-bit image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int iterations = GetParameter<int>("Iterations");
        double theta = GetParameter<double>("Theta");
        double dt = GetParameter<double>("Dt");
        var result = inputImage.Convert<Gray, float>();
        for (int iter = 0; iter < iterations; iter++)
        {
            var next = ShockFilterIteration(result, theta, dt);
            // Fixed: the previous float buffer was dropped without Dispose,
            // leaking one full-size image per iteration.
            result.Dispose();
            result = next;
        }
        _logger.Debug("Process: Iterations = {Iterations}, Theta = {Theta}, Dt = {Dt}", iterations, theta, dt);
        var output = result.Convert<Gray, byte>();
        result.Dispose();
        return output;
    }

    /// <summary>
    /// One explicit time step of the shock-filter PDE:
    /// I ← I − dt · sign(ΔI) · |∇I|, applied only where the gradient magnitude
    /// exceeds <paramref name="theta"/>. Border pixels are copied through
    /// unchanged.
    /// </summary>
    private Image<Gray, float> ShockFilterIteration(Image<Gray, float> input, double theta, double dt)
    {
        int width = input.Width;
        int height = input.Height;
        var output = new Image<Gray, float>(width, height);
        for (int y = 1; y < height - 1; y++)
        {
            for (int x = 1; x < width - 1; x++)
            {
                // Central differences for the gradient.
                float dx = (input.Data[y, x + 1, 0] - input.Data[y, x - 1, 0]) / 2.0f;
                float dy = (input.Data[y + 1, x, 0] - input.Data[y - 1, x, 0]) / 2.0f;
                float gradMag = (float)Math.Sqrt(dx * dx + dy * dy);
                // Second differences for the Laplacian.
                float dxx = input.Data[y, x + 1, 0] - 2 * input.Data[y, x, 0] + input.Data[y, x - 1, 0];
                float dyy = input.Data[y + 1, x, 0] - 2 * input.Data[y, x, 0] + input.Data[y - 1, x, 0];
                float laplacian = dxx + dyy;
                float sign = laplacian > 0 ? 1.0f : -1.0f;
                if (gradMag > theta)
                {
                    output.Data[y, x, 0] = input.Data[y, x, 0] - (float)(dt * sign * gradMag);
                }
                else
                {
                    output.Data[y, x, 0] = input.Data[y, x, 0];
                }
            }
        }
        // The PDE stencil skips the 1-pixel border; copy it through unchanged.
        for (int x = 0; x < width; x++)
        {
            output.Data[0, x, 0] = input.Data[0, x, 0];
            output.Data[height - 1, x, 0] = input.Data[height - 1, x, 0];
        }
        for (int y = 0; y < height; y++)
        {
            output.Data[y, 0, 0] = input.Data[y, 0, 0];
            output.Data[y, width - 1, 0] = input.Data[y, width - 1, 0];
        }
        return output;
    }
}
@@ -0,0 +1,199 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// 文件å? HorizontalEdgeProcessor.cs
// æè¿°: 水平边缘检测算å­ï¼Œä¸“门用于检测水平方å‘的边缘
// 功能:
// - 检测水平边�
// - 支æŒPrewittå’ŒSobelç®—å­
// - å¯è°ƒèŠ‚æ£€æµ‹çµæ•度
// - 适用于检测水平线æ¡å’Œçº¹ç
// 算法: Prewitt/Sobel水平算å­
// 作è€? æŽä¼Ÿ wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Horizontal edge detection processor: highlights edges running horizontally
/// using a Sobel Y-derivative, a Prewitt horizontal mask, or a simple
/// vertical difference.
/// </summary>
public class HorizontalEdgeProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<HorizontalEdgeProcessor>();

    /// <summary>
    /// Creates the processor and resolves its localized name and description.
    /// </summary>
    public HorizontalEdgeProcessor()
    {
        Name = LocalizationHelper.GetString("HorizontalEdgeProcessor_Name");
        Description = LocalizationHelper.GetString("HorizontalEdgeProcessor_Description");
    }

    /// <summary>
    /// Registers Method (Sobel/Prewitt/Simple), Sensitivity (response gain,
    /// 0.1–5.0) and Threshold (0–255).
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Method"),
            typeof(string),
            "Sobel",
            null,
            null,
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Method_Desc"),
            new string[] { "Sobel", "Prewitt", "Simple" }));
        Parameters.Add("Sensitivity", new ProcessorParameter(
            "Sensitivity",
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Sensitivity"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Sensitivity_Desc")));
        Parameters.Add("Threshold", new ProcessorParameter(
            "Threshold",
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Threshold"),
            typeof(int),
            20,
            0,
            255,
            LocalizationHelper.GetString("HorizontalEdgeProcessor_Threshold_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Dispatches to the configured edge-detection method; unrecognized
    /// values fall back to the Simple method.
    /// </summary>
    /// <param name="inputImage">8-bit grayscale input.</param>
    /// <returns>Edge-response 8-bit image.</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double sensitivity = GetParameter<double>("Sensitivity");
        int threshold = GetParameter<int>("Threshold");
        Image<Gray, byte> result;
        if (method == "Sobel")
        {
            result = ApplySobel(inputImage, sensitivity, threshold);
        }
        else if (method == "Prewitt")
        {
            result = ApplyPrewitt(inputImage, sensitivity, threshold);
        }
        else // Simple
        {
            result = ApplySimple(inputImage, sensitivity, threshold);
        }
        _logger.Debug("Process: Method = {Method}, Sensitivity = {Sensitivity}, Threshold = {Threshold}",
            method, sensitivity, threshold);
        return result;
    }

    /// <summary>
    /// Sobel-based detection: first derivative in Y responds to horizontal
    /// edges; the absolute response is scaled by the sensitivity gain and
    /// binarized against the threshold when it is positive.
    /// </summary>
    private Image<Gray, byte> ApplySobel(Image<Gray, byte> inputImage, double sensitivity, int threshold)
    {
        // Y-direction derivative detects horizontal edges.
        Image<Gray, float> sobelY = new Image<Gray, float>(inputImage.Size);
        CvInvoke.Sobel(inputImage, sobelY, DepthType.Cv32F, 0, 1, 3);
        // Absolute value scaled by sensitivity, saturated into 8 bits.
        Image<Gray, byte> result = new Image<Gray, byte>(inputImage.Size);
        CvInvoke.ConvertScaleAbs(sobelY, result, sensitivity, 0);
        if (threshold > 0)
        {
            CvInvoke.Threshold(result, result, threshold, 255, ThresholdType.Binary);
            // Fixed: a follow-up ToZero threshold at 0 was applied here; on an
            // already binary (0/255) image it was a no-op and has been removed.
        }
        sobelY.Dispose();
        return result;
    }

    /// <summary>
    /// Prewitt horizontal mask applied manually:
    ///   [ 1  1  1]
    ///   [ 0  0  0]
    ///   [-1 -1 -1]
    /// The absolute response is scaled by the sensitivity gain and zeroed
    /// below the threshold. The 1-pixel border is left at 0.
    /// </summary>
    private Image<Gray, byte> ApplyPrewitt(Image<Gray, byte> inputImage, double sensitivity, int threshold)
    {
        int width = inputImage.Width;
        int height = inputImage.Height;
        byte[,,] inputData = inputImage.Data;
        Image<Gray, byte> result = new Image<Gray, byte>(width, height);
        byte[,,] outputData = result.Data;
        for (int y = 1; y < height - 1; y++)
        {
            for (int x = 1; x < width - 1; x++)
            {
                int sum = 0;
                // Row above (positive weights).
                sum += inputData[y - 1, x - 1, 0];
                sum += inputData[y - 1, x, 0];
                sum += inputData[y - 1, x + 1, 0];
                // Row below (negative weights).
                sum -= inputData[y + 1, x - 1, 0];
                sum -= inputData[y + 1, x, 0];
                sum -= inputData[y + 1, x + 1, 0];
                // Absolute value scaled by the sensitivity gain.
                int value = (int)(Math.Abs(sum) * sensitivity);
                if (value > threshold)
                {
                    outputData[y, x, 0] = (byte)Math.Min(255, value);
                }
                else
                {
                    outputData[y, x, 0] = 0;
                }
            }
        }
        return result;
    }

    /// <summary>
    /// Simple detection: the absolute difference between the pixel directly
    /// above and the pixel directly below, scaled by the sensitivity gain and
    /// zeroed below the threshold. Top and bottom rows stay 0.
    /// </summary>
    private Image<Gray, byte> ApplySimple(Image<Gray, byte> inputImage, double sensitivity, int threshold)
    {
        int width = inputImage.Width;
        int height = inputImage.Height;
        byte[,,] inputData = inputImage.Data;
        Image<Gray, byte> result = new Image<Gray, byte>(width, height);
        byte[,,] outputData = result.Data;
        for (int y = 1; y < height - 1; y++)
        {
            for (int x = 0; x < width; x++)
            {
                // Vertical central difference: above minus below.
                int diff = inputData[y - 1, x, 0] - inputData[y + 1, x, 0];
                int value = (int)(Math.Abs(diff) * sensitivity);
                if (value > threshold)
                {
                    outputData[y, x, 0] = (byte)Math.Min(255, value);
                }
                else
                {
                    outputData[y, x, 0] = 0;
                }
            }
        }
        return result;
    }
}
@@ -0,0 +1,133 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: KirschEdgeProcessor.cs
// Description: Kirsch edge-detection processor (compass-mask edge detector)
// Features:
//   - Kirsch compass-mask edge detection
//   - Eight direction kernels (N, NW, W, SW, S, SE, E, NE)
//   - Keeps the maximum response over all eight directions
//   - Adjustable threshold and response scale
// Algorithm: Kirsch operator (eight 3x3 compass kernels)
// Author: Wei Li  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Kirsch compass-mask edge detection processor.
/// </summary>
public class KirschEdgeProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<KirschEdgeProcessor>();
// The eight 3x3 Kirsch compass masks, one per principal direction.
private static readonly int[][,] KirschKernels = new int[8][,]
{
    // N
    new int[,] { { 5, 5, 5 }, { -3, 0, -3 }, { -3, -3, -3 } },
    // NW
    new int[,] { { 5, 5, -3 }, { 5, 0, -3 }, { -3, -3, -3 } },
    // W
    new int[,] { { 5, -3, -3 }, { 5, 0, -3 }, { 5, -3, -3 } },
    // SW
    new int[,] { { -3, -3, -3 }, { 5, 0, -3 }, { 5, 5, -3 } },
    // S
    new int[,] { { -3, -3, -3 }, { -3, 0, -3 }, { 5, 5, 5 } },
    // SE
    new int[,] { { -3, -3, -3 }, { -3, 0, 5 }, { -3, 5, 5 } },
    // E
    new int[,] { { -3, -3, 5 }, { -3, 0, 5 }, { -3, -3, 5 } },
    // NE
    new int[,] { { -3, 5, 5 }, { -3, 0, 5 }, { -3, -3, -3 } }
};
public KirschEdgeProcessor()
{
Name = LocalizationHelper.GetString("KirschEdgeProcessor_Name");
Description = LocalizationHelper.GetString("KirschEdgeProcessor_Description");
}
protected override void InitializeParameters()
{
Parameters.Add("Threshold", new ProcessorParameter(
"Threshold",
LocalizationHelper.GetString("KirschEdgeProcessor_Threshold"),
typeof(int),
100,
0,
1000,
LocalizationHelper.GetString("KirschEdgeProcessor_Threshold_Desc")));
Parameters.Add("Scale", new ProcessorParameter(
"Scale",
LocalizationHelper.GetString("KirschEdgeProcessor_Scale"),
typeof(double),
1.0,
0.1,
5.0,
LocalizationHelper.GetString("KirschEdgeProcessor_Scale_Desc")));
_logger.Debug("InitializeParameters");
}
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
int threshold = GetParameter<int>("Threshold");
double scale = GetParameter<double>("Scale");
int width = inputImage.Width;
int height = inputImage.Height;
byte[,,] inputData = inputImage.Data;
Image<Gray, byte> result = new Image<Gray, byte>(width, height);
byte[,,] outputData = result.Data;
// 撖寞銝芸蝝惩?銝枝irsch璅⊥踎嚗憭批摨?
for (int y = 1; y < height - 1; y++)
{
for (int x = 1; x < width - 1; x++)
{
int maxResponse = 0;
// 撖?銝芣䲮怨恣蝞?
for (int k = 0; k < 8; k++)
{
int sum = 0;
for (int ky = 0; ky < 3; ky++)
{
for (int kx = 0; kx < 3; kx++)
{
int pixelValue = inputData[y + ky - 1, x + kx - 1, 0];
sum += pixelValue * KirschKernels[k][ky, kx];
}
}
// 𣇉撖孵?
sum = Math.Abs(sum);
if (sum > maxResponse)
{
maxResponse = sum;
}
}
// 摨𠉛鍂蝻拇𦆮
if (maxResponse > threshold)
{
int value = (int)(maxResponse * scale);
outputData[y, x, 0] = (byte)Math.Min(255, Math.Max(0, value));
}
else
{
outputData[y, x, 0] = 0;
}
}
}
_logger.Debug("Process: Threshold = {Threshold}, Scale = {Scale}", threshold, scale);
return result;
}
}
@@ -0,0 +1,135 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: SobelEdgeProcessor.cs
// Description: Sobel edge-detection operator for detecting image edges
// Features:
//   - Sobel operator edge detection
//   - Supports X-direction, Y-direction and combined detection
//   - Adjustable kernel size
//   - Outputs edge magnitude
// Algorithm: Sobel operator
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Sobel边缘检测算�
/// </summary>
/// <summary>
/// Sobel edge-detection processor. Computes the X and/or Y image gradient with
/// <see cref="CvInvoke.Sobel"/> and outputs either a single directional gradient
/// or the combined magnitude sqrt(Gx^2 + Gy^2).
/// </summary>
public class SobelEdgeProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SobelEdgeProcessor>();

    public SobelEdgeProcessor()
    {
        Name = LocalizationHelper.GetString("SobelEdgeProcessor_Name");
        Description = LocalizationHelper.GetString("SobelEdgeProcessor_Description");
    }

    /// <summary>
    /// Registers the user-tunable parameters: gradient direction, aperture size and output scale.
    /// </summary>
    protected override void InitializeParameters()
    {
        Parameters.Add("Direction", new ProcessorParameter(
            "Direction",
            LocalizationHelper.GetString("SobelEdgeProcessor_Direction"),
            typeof(string),
            "Both",
            null,
            null,
            LocalizationHelper.GetString("SobelEdgeProcessor_Direction_Desc"),
            new string[] { "Both", "Horizontal", "Vertical" }));

        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("SobelEdgeProcessor_KernelSize"),
            typeof(int),
            3,
            1,
            7,
            LocalizationHelper.GetString("SobelEdgeProcessor_KernelSize_Desc")));

        Parameters.Add("Scale", new ProcessorParameter(
            "Scale",
            LocalizationHelper.GetString("SobelEdgeProcessor_Scale"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("SobelEdgeProcessor_Scale_Desc")));

        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the Sobel operator to <paramref name="inputImage"/>.
    /// </summary>
    /// <param name="inputImage">Source grayscale image.</param>
    /// <returns>New grayscale image of scaled gradient values (single direction) or gradient magnitude (Both).</returns>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string direction = GetParameter<string>("Direction");
        int kernelSize = GetParameter<int>("KernelSize");
        double scale = GetParameter<double>("Scale");

        // cv::Sobel requires an odd aperture size no larger than 7.
        if (kernelSize % 2 == 0) kernelSize++;
        if (kernelSize > 7) kernelSize = 7;
        if (kernelSize < 1) kernelSize = 1;

        // Allocate gradient buffers only for the branch taken, and dispose them
        // deterministically. (The previous version pre-allocated both gradients
        // plus an output image that leaked when "Both" reassigned the result.)
        Image<Gray, byte> result;
        if (direction == "Both")
        {
            using Image<Gray, float> sobelX = new Image<Gray, float>(inputImage.Size);
            using Image<Gray, float> sobelY = new Image<Gray, float>(inputImage.Size);
            CvInvoke.Sobel(inputImage, sobelX, DepthType.Cv32F, 1, 0, kernelSize);
            CvInvoke.Sobel(inputImage, sobelY, DepthType.Cv32F, 0, 1, kernelSize);

            // Gradient magnitude: sqrt(Gx^2 + Gy^2), computed per pixel.
            using Image<Gray, float> magnitude = new Image<Gray, float>(inputImage.Size);
            float[,,] gxData = sobelX.Data;      // hoist .Data out of the hot loop
            float[,,] gyData = sobelY.Data;
            float[,,] magData = magnitude.Data;
            for (int y = 0; y < inputImage.Height; y++)
            {
                for (int x = 0; x < inputImage.Width; x++)
                {
                    float gx = gxData[y, x, 0];
                    float gy = gyData[y, x, 0];
                    magData[y, x, 0] = (float)Math.Sqrt(gx * gx + gy * gy);
                }
            }

            // Apply the scale factor and convert (with saturation) to bytes.
            using Image<Gray, float> scaled = magnitude * scale;
            result = scaled.Convert<Gray, byte>();
        }
        else
        {
            using Image<Gray, float> gradient = new Image<Gray, float>(inputImage.Size);
            if (direction == "Horizontal")
            {
                // X direction (responds to vertical intensity transitions).
                CvInvoke.Sobel(inputImage, gradient, DepthType.Cv32F, 1, 0, kernelSize);
            }
            else // Vertical
            {
                // Y direction (responds to horizontal intensity transitions).
                CvInvoke.Sobel(inputImage, gradient, DepthType.Cv32F, 0, 1, kernelSize);
            }

            result = new Image<Gray, byte>(inputImage.Size);
            CvInvoke.ConvertScaleAbs(gradient, result, scale, 0);
        }

        _logger.Debug("Process: Direction = {Direction}, KernelSize = {KernelSize}, Scale = {Scale}",
            direction, kernelSize, scale);
        return result;
    }
}