规范类名及命名空间名称

This commit is contained in:
李伟
2026-04-13 14:35:37 +08:00
parent c430ec229b
commit ace1c70ddf
217 changed files with 1271 additions and 1384 deletions
@@ -0,0 +1,256 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: ColorLayerProcessor.cs
// Description: Color-layer (gray-level slicing) processor; partitions a
//              grayscale image into intensity bands.
// Features:
//   - Multi-threshold slicing of grayscale images
//   - Configurable number of layers (2-16)
//   - Uniform and recursive-Otsu threshold selection
//   - Single-layer extraction; equally spaced or mid-value output levels
// Algorithm: gray-level quantization / multi-threshold slicing
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Color-layer (gray-level slicing) processor: partitions a grayscale image into
/// intensity bands, then either remaps every band to a representative gray value
/// or extracts a single band as a binary mask.
/// </summary>
public class ColorLayerProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ColorLayerProcessor>();

    public ColorLayerProcessor()
    {
        Name = LocalizationHelper.GetString("ColorLayerProcessor_Name");
        Description = LocalizationHelper.GetString("ColorLayerProcessor_Description");
    }

    protected override void InitializeParameters()
    {
        // Number of intensity bands the image is sliced into (2..16).
        Parameters.Add("Layers", new ProcessorParameter(
            "Layers",
            LocalizationHelper.GetString("ColorLayerProcessor_Layers"),
            typeof(int),
            4,
            2,
            16,
            LocalizationHelper.GetString("ColorLayerProcessor_Layers_Desc")));
        // Threshold selection strategy: uniform spacing or recursive Otsu.
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("ColorLayerProcessor_Method"),
            typeof(string),
            "Uniform",
            null,
            null,
            LocalizationHelper.GetString("ColorLayerProcessor_Method_Desc"),
            new string[] { "Uniform", "Otsu" }));
        // Output remapping: equally spaced levels, or the mid-value of each band.
        Parameters.Add("OutputMode", new ProcessorParameter(
            "OutputMode",
            LocalizationHelper.GetString("ColorLayerProcessor_OutputMode"),
            typeof(string),
            "EqualSpaced",
            null,
            null,
            LocalizationHelper.GetString("ColorLayerProcessor_OutputMode_Desc"),
            new string[] { "EqualSpaced", "MidValue" }));
        // 0 = output all layers; 1..Layers = output only that layer as a 255/0 mask.
        Parameters.Add("TargetLayer", new ProcessorParameter(
            "TargetLayer",
            LocalizationHelper.GetString("ColorLayerProcessor_TargetLayer"),
            typeof(int),
            0,
            0,
            16,
            LocalizationHelper.GetString("ColorLayerProcessor_TargetLayer_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Slices <paramref name="inputImage"/> into layers and returns the remapped image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int layers = GetParameter<int>("Layers");
        string method = GetParameter<string>("Method");
        string outputMode = GetParameter<string>("OutputMode");
        int targetLayer = GetParameter<int>("TargetLayer");

        // Compute the band thresholds.
        byte[] thresholds = method == "Otsu"
            ? ComputeOtsuMultiThresholds(inputImage, layers)
            : ComputeUniformThresholds(layers);

        // FIX: recursive Otsu can legally return fewer than (layers - 1)
        // thresholds on degenerate histograms (empty or single-valued
        // sub-ranges). Derive the effective layer count from the thresholds
        // actually produced so ComputeLayerValues never over-indexes the
        // threshold array in MidValue mode.
        layers = thresholds.Length + 1;

        // Clamp the target layer to the valid range (0 = all layers).
        if (targetLayer < 0 || targetLayer > layers)
            targetLayer = 0;

        _logger.Debug("Process: Layers={Layers}, Method={Method}, OutputMode={OutputMode}, TargetLayer={TargetLayer}",
            layers, method, outputMode, targetLayer);

        // Representative output gray value for each layer.
        byte[] layerValues = ComputeLayerValues(thresholds, layers, outputMode);

        // Apply the layer mapping pixel by pixel.
        int width = inputImage.Width;
        int height = inputImage.Height;
        var result = new Image<Gray, byte>(width, height);
        var srcData = inputImage.Data;
        var dstData = result.Data;
        if (targetLayer == 0)
        {
            // Output all layers.
            Parallel.For(0, height, y =>
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixel = srcData[y, x, 0];
                    int layerIdx = GetLayerIndex(pixel, thresholds);
                    dstData[y, x, 0] = layerValues[layerIdx];
                }
            });
        }
        else
        {
            // Output a single layer: the selected layer becomes 255 (white), all others 0 (black).
            int target = targetLayer - 1; // parameter is 1-based, internal index is 0-based
            Parallel.For(0, height, y =>
            {
                for (int x = 0; x < width; x++)
                {
                    byte pixel = srcData[y, x, 0];
                    int layerIdx = GetLayerIndex(pixel, thresholds);
                    dstData[y, x, 0] = (layerIdx == target) ? (byte)255 : (byte)0;
                }
            });
        }
        _logger.Debug("Process completed: {Layers} layers, target={TargetLayer}", layers, targetLayer);
        return result;
    }

    /// <summary>
    /// Uniform thresholds: equally spaced over [0, 255].
    /// </summary>
    private static byte[] ComputeUniformThresholds(int layers)
    {
        var thresholds = new byte[layers - 1];
        double step = 256.0 / layers;
        for (int i = 0; i < layers - 1; i++)
            thresholds[i] = (byte)Math.Clamp((int)((i + 1) * step), 0, 255);
        return thresholds;
    }

    /// <summary>
    /// Multi-threshold selection based on recursive binary Otsu.
    /// May return fewer than (layers - 1) thresholds for degenerate histograms.
    /// </summary>
    private static byte[] ComputeOtsuMultiThresholds(Image<Gray, byte> image, int layers)
    {
        // Build the intensity histogram.
        int[] histogram = new int[256];
        var data = image.Data;
        int h = image.Height, w = image.Width;
        for (int y = 0; y < h; y++)
            for (int x = 0; x < w; x++)
                histogram[data[y, x, 0]]++;
        // Recursive Otsu split.
        var thresholds = new List<byte>();
        RecursiveOtsu(histogram, 0, 255, layers, thresholds);
        thresholds.Sort();
        return thresholds.ToArray();
    }

    /// <summary>
    /// Recursive Otsu: finds the optimal threshold within [low, high], then splits
    /// the range in two and recurses until the requested layer count is reached.
    /// </summary>
    private static void RecursiveOtsu(int[] histogram, int low, int high, int layers, List<byte> thresholds)
    {
        if (layers <= 1 || low >= high)
            return;
        // Otsu threshold restricted to the [low, high] sub-range.
        long totalPixels = 0;
        long totalSum = 0;
        for (int i = low; i <= high; i++)
        {
            totalPixels += histogram[i];
            totalSum += (long)i * histogram[i];
        }
        if (totalPixels == 0) return;
        long bgPixels = 0, bgSum = 0;
        double maxVariance = 0;
        int bestThreshold = (low + high) / 2;
        for (int t = low; t < high; t++)
        {
            bgPixels += histogram[t];
            bgSum += (long)t * histogram[t];
            long fgPixels = totalPixels - bgPixels;
            if (bgPixels == 0 || fgPixels == 0) continue;
            double bgMean = (double)bgSum / bgPixels;
            double fgMean = (double)(totalSum - bgSum) / fgPixels;
            // Between-class variance (unnormalized); the maximizing t is the Otsu threshold.
            double variance = (double)bgPixels * fgPixels * (bgMean - fgMean) * (bgMean - fgMean);
            if (variance > maxVariance)
            {
                maxVariance = variance;
                bestThreshold = t;
            }
        }
        thresholds.Add((byte)bestThreshold);
        // Distribute the remaining layers over the two halves and recurse.
        int leftLayers = layers / 2;
        int rightLayers = layers - leftLayers;
        RecursiveOtsu(histogram, low, bestThreshold, leftLayers, thresholds);
        RecursiveOtsu(histogram, bestThreshold + 1, high, rightLayers, thresholds);
    }

    /// <summary>
    /// Computes the representative output gray value for each layer.
    /// </summary>
    private static byte[] ComputeLayerValues(byte[] thresholds, int layers, string outputMode)
    {
        var values = new byte[layers];
        // FIX: guard the degenerate single-layer case (possible when Otsu finds
        // no threshold); the EqualSpaced formula below would divide by zero.
        if (layers == 1)
        {
            values[0] = 128;
            return values;
        }
        if (outputMode == "EqualSpaced")
        {
            // Equally spaced output: 0, 255/(n-1), 2*255/(n-1), ..., 255
            for (int i = 0; i < layers; i++)
                values[i] = (byte)Math.Clamp((int)(255.0 * i / (layers - 1)), 0, 255);
        }
        else // MidValue
        {
            // Mid-value of each band.
            values[0] = (byte)(thresholds.Length > 0 ? thresholds[0] / 2 : 128);
            for (int i = 1; i < layers - 1; i++)
                values[i] = (byte)((thresholds[i - 1] + thresholds[i]) / 2);
            values[layers - 1] = (byte)(thresholds.Length > 0 ? (thresholds[^1] + 255) / 2 : 128);
        }
        return values;
    }

    /// <summary>
    /// Returns the index of the layer the pixel falls into, given sorted thresholds.
    /// </summary>
    private static int GetLayerIndex(byte pixel, byte[] thresholds)
    {
        for (int i = 0; i < thresholds.Length; i++)
        {
            if (pixel < thresholds[i])
                return i;
        }
        return thresholds.Length;
    }
}
@@ -0,0 +1,172 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: ContrastProcessor.cs
// Description: Contrast adjustment processor.
// Features:
//   - Linear contrast and brightness adjustment
//   - Automatic contrast stretch
//   - CLAHE (contrast-limited adaptive histogram equalization)
//   - Multiple enhancement modes
// Algorithm: linear stretch / CLAHE
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Contrast adjustment processor: linear contrast/brightness, automatic
/// contrast stretch, or CLAHE.
/// </summary>
public class ContrastProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<ContrastProcessor>();

    public ContrastProcessor()
    {
        Name = LocalizationHelper.GetString("ContrastProcessor_Name");
        Description = LocalizationHelper.GetString("ContrastProcessor_Description");
    }

    protected override void InitializeParameters()
    {
        // Linear contrast multiplier (used when AutoContrast and UseCLAHE are off).
        Parameters.Add("Contrast", new ProcessorParameter(
            "Contrast",
            LocalizationHelper.GetString("ContrastProcessor_Contrast"),
            typeof(double),
            1.0,
            0.1,
            3.0,
            LocalizationHelper.GetString("ContrastProcessor_Contrast_Desc")));
        // Additive brightness offset in gray levels.
        Parameters.Add("Brightness", new ProcessorParameter(
            "Brightness",
            LocalizationHelper.GetString("ContrastProcessor_Brightness"),
            typeof(int),
            0,
            -100,
            100,
            LocalizationHelper.GetString("ContrastProcessor_Brightness_Desc")));
        // Min-max stretch to the full [0, 255] range.
        Parameters.Add("AutoContrast", new ProcessorParameter(
            "AutoContrast",
            LocalizationHelper.GetString("ContrastProcessor_AutoContrast"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("ContrastProcessor_AutoContrast_Desc")));
        // CLAHE takes precedence over AutoContrast when both are enabled.
        Parameters.Add("UseCLAHE", new ProcessorParameter(
            "UseCLAHE",
            LocalizationHelper.GetString("ContrastProcessor_UseCLAHE"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("ContrastProcessor_UseCLAHE_Desc")));
        // CLAHE histogram clip limit.
        Parameters.Add("ClipLimit", new ProcessorParameter(
            "ClipLimit",
            LocalizationHelper.GetString("ContrastProcessor_ClipLimit"),
            typeof(double),
            2.0,
            1.0,
            10.0,
            LocalizationHelper.GetString("ContrastProcessor_ClipLimit_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies the selected contrast enhancement and returns a new image.
    /// Priority: CLAHE > AutoContrast > linear contrast/brightness.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double contrast = GetParameter<double>("Contrast");
        int brightness = GetParameter<int>("Brightness");
        bool autoContrast = GetParameter<bool>("AutoContrast");
        bool useCLAHE = GetParameter<bool>("UseCLAHE");
        double clipLimit = GetParameter<double>("ClipLimit");

        // FIX: the previous version cloned the input and immediately discarded
        // the clone in every branch, leaking a disposable image.
        Image<Gray, byte> result;
        if (useCLAHE)
        {
            result = ApplyCLAHE(inputImage, clipLimit);
        }
        else if (autoContrast)
        {
            result = AutoContrastStretch(inputImage);
        }
        else
        {
            result = inputImage * contrast + brightness;
        }
        _logger.Debug("Process: Contrast = {contrast},Brightness = {brightness}," +
            "AutoContrast = {autoContrast},UseCLAHE = {useCLAHE}, ClipLimit = {clipLimit}", contrast, brightness, autoContrast, useCLAHE, clipLimit);
        return result;
    }

    /// <summary>
    /// Linearly stretches the image's min..max gray range to [0, 255].
    /// Returns a plain clone when the image already spans the full range.
    /// </summary>
    private Image<Gray, byte> AutoContrastStretch(Image<Gray, byte> inputImage)
    {
        double minVal = 0, maxVal = 0;
        Point minLoc = new Point();
        Point maxLoc = new Point();
        CvInvoke.MinMaxLoc(inputImage, ref minVal, ref maxVal, ref minLoc, ref maxLoc);
        if (minVal == 0 && maxVal == 255)
        {
            return inputImage.Clone();
        }
        // FIX: dispose the intermediate float images (the previous version
        // leaked both the original conversion and the stretched result).
        using var floatImage = inputImage.Convert<Gray, float>();
        _logger.Debug("AutoContrastStretch");
        if (maxVal > minVal)
        {
            using var stretched = (floatImage - minVal) * (255.0 / (maxVal - minVal));
            return stretched.Convert<Gray, byte>();
        }
        return floatImage.Convert<Gray, byte>();
    }

    /// <summary>
    /// Contrast-limited adaptive histogram equalization.
    /// </summary>
    private Image<Gray, byte> ApplyCLAHE(Image<Gray, byte> inputImage, double clipLimit)
    {
        // FIX: the previous implementation equalized each 8x8 tile independently,
        // which ignored the clipLimit parameter entirely and produced hard seams
        // at tile boundaries. OpenCV's CLAHE applies the clip limit and
        // bilinearly interpolates between neighbouring tiles.
        var result = new Image<Gray, byte>(inputImage.Width, inputImage.Height);
        CvInvoke.CLAHE(inputImage, clipLimit, new Size(8, 8), result);
        _logger.Debug("ApplyCLAHE");
        return result;
    }
}
@@ -0,0 +1,100 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: GammaProcessor.cs
// Description: Gamma correction processor for adjusting image brightness and contrast.
// Features:
//   - Non-linear gamma correction
//   - Gain adjustment
//   - Lookup-table (LUT) accelerated processing
//   - Suitable for display preparation and brightness adjustment
// Algorithm: gamma correction formula output = (input^(1/gamma)) * gain
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Gamma correction processor: output = (input ^ (1/gamma)) * gain,
/// implemented with a 256-entry lookup table.
/// </summary>
public class GammaProcessor : ImageProcessorBase
{
    // 256-entry LUT mapping each input gray level to its corrected value.
    // FIX: field is only ever assigned once, so make it readonly and
    // initialize it inline instead of in the constructor.
    private readonly byte[] _lookupTable = new byte[256];
    private static readonly ILogger _logger = Log.ForContext<GammaProcessor>();

    public GammaProcessor()
    {
        Name = LocalizationHelper.GetString("GammaProcessor_Name");
        Description = LocalizationHelper.GetString("GammaProcessor_Description");
    }

    protected override void InitializeParameters()
    {
        // Gamma exponent; 1.0 = identity, < 1 darkens, > 1 brightens mid-tones.
        Parameters.Add("Gamma", new ProcessorParameter(
            "Gamma",
            LocalizationHelper.GetString("GammaProcessor_Gamma"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("GammaProcessor_Gamma_Desc")));
        // Linear gain applied after the gamma curve.
        Parameters.Add("Gain", new ProcessorParameter(
            "Gain",
            LocalizationHelper.GetString("GammaProcessor_Gain"),
            typeof(double),
            1.0,
            0.1,
            3.0,
            LocalizationHelper.GetString("GammaProcessor_Gain_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Applies gamma correction to a copy of <paramref name="inputImage"/>.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double gamma = GetParameter<double>("Gamma");
        double gain = GetParameter<double>("Gain");
        BuildLookupTable(gamma, gain);
        var result = inputImage.Clone();
        ApplyLookupTable(result);
        // FIX: use named Serilog message-template properties instead of
        // positional {0}/{1} placeholders, consistent with the other processors.
        _logger.Debug("Process: Gamma = {Gamma}, Gain = {Gain}", gamma, gain);
        return result;
    }

    /// <summary>
    /// Rebuilds the LUT: value = clamp(((i / 255) ^ (1/gamma)) * gain * 255, 0, 255).
    /// </summary>
    private void BuildLookupTable(double gamma, double gain)
    {
        double invGamma = 1.0 / gamma;
        for (int i = 0; i < 256; i++)
        {
            double normalized = i / 255.0;
            double corrected = Math.Pow(normalized, invGamma) * gain;
            int value = (int)(corrected * 255.0);
            _lookupTable[i] = (byte)Math.Clamp(value, 0, 255);
        }
        _logger.Debug("Gamma and gain values recorded: gamma = {Gamma}, gain = {Gain}", gamma, gain);
    }

    /// <summary>
    /// Applies the LUT to every pixel of the image in place.
    /// </summary>
    private void ApplyLookupTable(Image<Gray, byte> image)
    {
        int width = image.Width;
        int height = image.Height;
        // Cache the data array and process rows in parallel, consistent with
        // the other processors in this namespace.
        var data = image.Data;
        Parallel.For(0, height, y =>
        {
            for (int x = 0; x < width; x++)
            {
                data[y, x, 0] = _lookupTable[data[y, x, 0]];
            }
        });
    }
}
@@ -0,0 +1,549 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: HDREnhancementProcessor.cs
// Description: High dynamic range (HDR) image enhancement processor.
// Features:
//   - Local tone mapping
//   - Adaptive logarithmic mapping
//   - Drago tone mapping
//   - Bilateral-filter tone mapping
//   - Enhances detail in both shadow and highlight regions
// Algorithm: tone-mapping based HDR enhancement
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// High dynamic range (HDR) image enhancement processor based on tone mapping.
/// </summary>
public class HDREnhancementProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<HDREnhancementProcessor>();
public HDREnhancementProcessor()
{
Name = LocalizationHelper.GetString("HDREnhancementProcessor_Name");
Description = LocalizationHelper.GetString("HDREnhancementProcessor_Description");
}
protected override void InitializeParameters()
{
// Tone-mapping method selection.
Parameters.Add("Method", new ProcessorParameter(
"Method",
LocalizationHelper.GetString("HDREnhancementProcessor_Method"),
typeof(string),
"LocalToneMap",
null,
null,
LocalizationHelper.GetString("HDREnhancementProcessor_Method_Desc"),
new string[] { "LocalToneMap", "AdaptiveLog", "Drago", "BilateralToneMap" }));
// Gamma correction applied after tone mapping.
Parameters.Add("Gamma", new ProcessorParameter(
"Gamma",
LocalizationHelper.GetString("HDREnhancementProcessor_Gamma"),
typeof(double),
1.0,
0.1,
5.0,
LocalizationHelper.GetString("HDREnhancementProcessor_Gamma_Desc")));
// Contrast about the mean (LocalToneMap only — see LocalToneMapping).
Parameters.Add("Saturation", new ProcessorParameter(
"Saturation",
LocalizationHelper.GetString("HDREnhancementProcessor_Saturation"),
typeof(double),
1.0,
0.0,
3.0,
LocalizationHelper.GetString("HDREnhancementProcessor_Saturation_Desc")));
// Multiplier applied to the detail layer during recomposition.
Parameters.Add("DetailBoost", new ProcessorParameter(
"DetailBoost",
LocalizationHelper.GetString("HDREnhancementProcessor_DetailBoost"),
typeof(double),
1.5,
0.0,
5.0,
LocalizationHelper.GetString("HDREnhancementProcessor_DetailBoost_Desc")));
// Spatial sigma for the Gaussian / bilateral base-layer extraction.
Parameters.Add("SigmaSpace", new ProcessorParameter(
"SigmaSpace",
LocalizationHelper.GetString("HDREnhancementProcessor_SigmaSpace"),
typeof(double),
20.0,
1.0,
100.0,
LocalizationHelper.GetString("HDREnhancementProcessor_SigmaSpace_Desc")));
// Range sigma for the bilateral filter (BilateralToneMap only).
Parameters.Add("SigmaColor", new ProcessorParameter(
"SigmaColor",
LocalizationHelper.GetString("HDREnhancementProcessor_SigmaColor"),
typeof(double),
30.0,
1.0,
100.0,
LocalizationHelper.GetString("HDREnhancementProcessor_SigmaColor_Desc")));
// Shadow/highlight balance for AdaptiveLog and Drago mappings.
Parameters.Add("Bias", new ProcessorParameter(
"Bias",
LocalizationHelper.GetString("HDREnhancementProcessor_Bias"),
typeof(double),
0.85,
0.0,
1.0,
LocalizationHelper.GetString("HDREnhancementProcessor_Bias_Desc")));
_logger.Debug("InitializeParameters");
}
/// <summary>
/// Dispatches to the tone-mapping implementation selected by "Method"
/// (unknown values fall through to LocalToneMap) and returns a new image.
/// </summary>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
string method = GetParameter<string>("Method");
double gamma = GetParameter<double>("Gamma");
double saturation = GetParameter<double>("Saturation");
double detailBoost = GetParameter<double>("DetailBoost");
double sigmaSpace = GetParameter<double>("SigmaSpace");
double sigmaColor = GetParameter<double>("SigmaColor");
double bias = GetParameter<double>("Bias");
Image<Gray, byte> result;
switch (method)
{
case "AdaptiveLog":
result = AdaptiveLogarithmicMapping(inputImage, gamma, bias);
break;
case "Drago":
result = DragoToneMapping(inputImage, gamma, bias);
break;
case "BilateralToneMap":
result = BilateralToneMapping(inputImage, gamma, sigmaSpace, sigmaColor, detailBoost);
break;
default: // LocalToneMap
result = LocalToneMapping(inputImage, gamma, sigmaSpace, detailBoost, saturation);
break;
}
_logger.Debug("Process: Method={Method}, Gamma={Gamma}, Saturation={Saturation}, DetailBoost={DetailBoost}, SigmaSpace={SigmaSpace}, SigmaColor={SigmaColor}, Bias={Bias}",
method, gamma, saturation, detailBoost, sigmaSpace, sigmaColor, bias);
return result;
}
/// <summary>
/// Local tone mapping.
/// Decomposes the image into a base layer (illumination) and a detail layer,
/// processes them separately, then recombines:
/// Base = GaussianBlur(log(I))
/// Detail = log(I) - Base
/// Output = exp(Base_compressed + Detail * boost)
/// </summary>
private Image<Gray, byte> LocalToneMapping(Image<Gray, byte> inputImage,
double gamma, double sigmaSpace, double detailBoost, double saturation)
{
int width = inputImage.Width;
int height = inputImage.Height;
// Convert to float and normalize into (0, 1]; the +0.001 offset avoids log(0).
var floatImage = inputImage.Convert<Gray, float>();
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
floatImage.Data[y, x, 0] = floatImage.Data[y, x, 0] / 255.0f + 0.001f;
// Log domain.
var logImage = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
logImage.Data[y, x, 0] = (float)Math.Log(floatImage.Data[y, x, 0]);
// Base layer: large-scale Gaussian blur extracts the illumination component.
int kernelSize = (int)(sigmaSpace * 6) | 1;
if (kernelSize < 3) kernelSize = 3;
var baseLayer = new Image<Gray, float>(width, height);
CvInvoke.GaussianBlur(logImage, baseLayer, new System.Drawing.Size(kernelSize, kernelSize), sigmaSpace);
// Detail layer.
var detailLayer = logImage - baseLayer;
// Compress the dynamic range of the base layer.
double baseMin = double.MaxValue, baseMax = double.MinValue;
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
float v = baseLayer.Data[y, x, 0];
if (v < baseMin) baseMin = v;
if (v > baseMax) baseMax = v;
}
}
double baseRange = baseMax - baseMin;
if (baseRange < 0.001) baseRange = 0.001;
// Target dynamic range (log domain).
double targetRange = Math.Log(256.0);
// NOTE(review): compressionFactor is computed but never used below — the
// compression is applied via `normalized * targetRange` instead. Dead code?
double compressionFactor = targetRange / baseRange;
var compressedBase = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
float normalized = (float)((baseLayer.Data[y, x, 0] - baseMin) / baseRange);
compressedBase.Data[y, x, 0] = (float)(normalized * targetRange + Math.Log(0.01));
}
}
// Recombine: compressed base layer + boosted detail layer.
var combined = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
float val = compressedBase.Data[y, x, 0] + detailLayer.Data[y, x, 0] * (float)detailBoost;
combined.Data[y, x, 0] = val;
}
}
// Exponentiate back to the linear domain.
var linearResult = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
linearResult.Data[y, x, 0] = (float)Math.Exp(combined.Data[y, x, 0]);
// Gamma correction (skipped when gamma ≈ 1).
if (Math.Abs(gamma - 1.0) > 0.01)
{
double invGamma = 1.0 / gamma;
double maxVal = 0;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
if (linearResult.Data[y, x, 0] > maxVal) maxVal = linearResult.Data[y, x, 0];
if (maxVal > 0)
{
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
{
double normalized = linearResult.Data[y, x, 0] / maxVal;
linearResult.Data[y, x, 0] = (float)(Math.Pow(normalized, invGamma) * maxVal);
}
}
}
// "Saturation" boost: scales each pixel's deviation from the mean (a
// contrast adjustment — this is a grayscale image, not chroma saturation).
if (Math.Abs(saturation - 1.0) > 0.01)
{
double mean = 0;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
mean += linearResult.Data[y, x, 0];
mean /= (width * height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
{
double diff = linearResult.Data[y, x, 0] - mean;
linearResult.Data[y, x, 0] = (float)(mean + diff * saturation);
}
}
// Normalize to [0, 255].
var result = NormalizeToByteImage(linearResult);
floatImage.Dispose();
logImage.Dispose();
baseLayer.Dispose();
detailLayer.Dispose();
compressedBase.Dispose();
combined.Dispose();
linearResult.Dispose();
return result;
}
/// <summary>
/// Adaptive logarithmic mapping.
/// Adapts the logarithmic curve to the overall scene brightness:
/// L_out = (log(1 + L_in) / log(1 + L_max)) ^ (1/gamma)
/// The shared log base is derived from the log-average luminance and bias.
/// </summary>
private Image<Gray, byte> AdaptiveLogarithmicMapping(Image<Gray, byte> inputImage,
double gamma, double bias)
{
int width = inputImage.Width;
int height = inputImage.Height;
var floatImage = inputImage.Convert<Gray, float>();
// Normalize to [0, 1].
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
floatImage.Data[y, x, 0] /= 255.0f;
// Global maximum luminance.
float globalMax = 0;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
if (floatImage.Data[y, x, 0] > globalMax)
globalMax = floatImage.Data[y, x, 0];
if (globalMax < 0.001f) globalMax = 0.001f;
// Log-average luminance (geometric mean of the non-dark pixels).
double logAvg = 0;
int count = 0;
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
float v = floatImage.Data[y, x, 0];
if (v > 0.001f)
{
logAvg += Math.Log(v);
count++;
}
}
}
logAvg = Math.Exp(logAvg / Math.Max(count, 1));
// Adaptive logarithmic mapping.
// The bias parameter controls the balance between shadows and highlights.
double logBase = Math.Log(2.0 + 8.0 * Math.Pow(logAvg / globalMax, Math.Log(bias) / Math.Log(0.5)));
var result = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
float lum = floatImage.Data[y, x, 0];
double mapped = Math.Log(1.0 + lum) / logBase;
result.Data[y, x, 0] = (float)mapped;
}
}
// Gamma correction (skipped when gamma ≈ 1).
if (Math.Abs(gamma - 1.0) > 0.01)
{
double invGamma = 1.0 / gamma;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
result.Data[y, x, 0] = (float)Math.Pow(Math.Max(0, result.Data[y, x, 0]), invGamma);
}
var byteResult = NormalizeToByteImage(result);
floatImage.Dispose();
result.Dispose();
return byteResult;
}
/// <summary>
/// Drago tone mapping.
/// Uses a per-pixel adaptive logarithm base:
/// L_out = log_base(1 + L_in) / log10(1 + L_max)
/// base = 2 + 8 * (L_in / L_max) ^ (ln(bias) / ln(0.5))
/// </summary>
private Image<Gray, byte> DragoToneMapping(Image<Gray, byte> inputImage,
double gamma, double bias)
{
int width = inputImage.Width;
int height = inputImage.Height;
var floatImage = inputImage.Convert<Gray, float>();
// Normalize to [0, 1].
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
floatImage.Data[y, x, 0] /= 255.0f;
// Global maximum luminance.
float maxLum = 0;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
if (floatImage.Data[y, x, 0] > maxLum)
maxLum = floatImage.Data[y, x, 0];
if (maxLum < 0.001f) maxLum = 0.001f;
double biasP = Math.Log(bias) / Math.Log(0.5);
// NOTE(review): the divider uses base-10 log while the per-pixel numerator
// below uses a natural-log ratio; the mapping stays monotonic but deviates
// from the published Drago formulation — confirm this is intentional.
double divider = Math.Log10(1.0 + maxLum);
if (divider < 0.001) divider = 0.001;
var result = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
float lum = floatImage.Data[y, x, 0];
// Adaptive logarithm base: darker pixels get a smaller base (more lift).
double adaptBase = 2.0 + 8.0 * Math.Pow(lum / maxLum, biasP);
double logAdapt = Math.Log(1.0 + lum) / Math.Log(adaptBase);
double mapped = logAdapt / divider;
result.Data[y, x, 0] = (float)Math.Max(0, Math.Min(1.0, mapped));
}
}
// Gamma correction (skipped when gamma ≈ 1).
if (Math.Abs(gamma - 1.0) > 0.01)
{
double invGamma = 1.0 / gamma;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
result.Data[y, x, 0] = (float)Math.Pow(result.Data[y, x, 0], invGamma);
}
var byteResult = NormalizeToByteImage(result);
floatImage.Dispose();
result.Dispose();
return byteResult;
}
/// <summary>
/// Bilateral-filter tone mapping.
/// Separates base and detail layers with an edge-preserving bilateral filter,
/// which keeps the detail layer sharper than a Gaussian decomposition.
/// </summary>
private Image<Gray, byte> BilateralToneMapping(Image<Gray, byte> inputImage,
double gamma, double sigmaSpace, double sigmaColor, double detailBoost)
{
int width = inputImage.Width;
int height = inputImage.Height;
// Convert to float and take the logarithm; +0.001 avoids log(0).
var floatImage = inputImage.Convert<Gray, float>();
var logImage = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
logImage.Data[y, x, 0] = (float)Math.Log(floatImage.Data[y, x, 0] / 255.0f + 0.001);
// Bilateral filter extracts the base layer (edge-preserving smoothing).
int diameter = (int)(sigmaSpace * 2) | 1;
if (diameter < 3) diameter = 3;
if (diameter > 31) diameter = 31;
var baseLayer = new Image<Gray, float>(width, height);
// Convert to byte for the bilateral filter, then back to float afterwards.
var logNorm = NormalizeToByteImage(logImage);
var baseNorm = new Image<Gray, byte>(width, height);
CvInvoke.BilateralFilter(logNorm, baseNorm, diameter, sigmaColor, sigmaSpace);
// Map the filtered base layer back into the floating-point log domain.
double logMin = double.MaxValue, logMax = double.MinValue;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
{
float v = logImage.Data[y, x, 0];
if (v < logMin) logMin = v;
if (v > logMax) logMax = v;
}
double logRange = logMax - logMin;
if (logRange < 0.001) logRange = 0.001;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
baseLayer.Data[y, x, 0] = (float)(baseNorm.Data[y, x, 0] / 255.0 * logRange + logMin);
// Detail layer = log image - base layer.
var detailLayer = logImage - baseLayer;
// Compress the base layer.
double baseMin = double.MaxValue, baseMax = double.MinValue;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
{
float v = baseLayer.Data[y, x, 0];
if (v < baseMin) baseMin = v;
if (v > baseMax) baseMax = v;
}
double bRange = baseMax - baseMin;
if (bRange < 0.001) bRange = 0.001;
double targetRange = Math.Log(256.0);
double compression = targetRange / bRange;
// Recombine: compressed base layer + boosted detail layer.
var combined = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
{
float compBase = (float)((baseLayer.Data[y, x, 0] - baseMin) * compression + Math.Log(0.01));
combined.Data[y, x, 0] = compBase + detailLayer.Data[y, x, 0] * (float)detailBoost;
}
// Exponentiate back to the linear domain.
var linearResult = new Image<Gray, float>(width, height);
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
linearResult.Data[y, x, 0] = (float)Math.Exp(combined.Data[y, x, 0]);
// Gamma correction (skipped when gamma ≈ 1).
if (Math.Abs(gamma - 1.0) > 0.01)
{
double invGamma = 1.0 / gamma;
double maxVal = 0;
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
if (linearResult.Data[y, x, 0] > maxVal) maxVal = linearResult.Data[y, x, 0];
if (maxVal > 0)
for (int y = 0; y < height; y++)
for (int x = 0; x < width; x++)
linearResult.Data[y, x, 0] = (float)(Math.Pow(linearResult.Data[y, x, 0] / maxVal, invGamma) * maxVal);
}
var result = NormalizeToByteImage(linearResult);
floatImage.Dispose();
logImage.Dispose();
logNorm.Dispose();
baseNorm.Dispose();
baseLayer.Dispose();
detailLayer.Dispose();
combined.Dispose();
linearResult.Dispose();
return result;
}
/// <summary>
/// Linearly rescales a float image's min..max range to a byte image in [0, 255].
/// Returns an all-zero image when the input is constant (range == 0).
/// </summary>
private Image<Gray, byte> NormalizeToByteImage(Image<Gray, float> floatImage)
{
double minVal = double.MaxValue;
double maxVal = double.MinValue;
for (int y = 0; y < floatImage.Height; y++)
for (int x = 0; x < floatImage.Width; x++)
{
float val = floatImage.Data[y, x, 0];
if (val < minVal) minVal = val;
if (val > maxVal) maxVal = val;
}
var result = new Image<Gray, byte>(floatImage.Size);
double range = maxVal - minVal;
if (range > 0)
{
for (int y = 0; y < floatImage.Height; y++)
for (int x = 0; x < floatImage.Width; x++)
{
int normalized = (int)((floatImage.Data[y, x, 0] - minVal) / range * 255.0);
result.Data[y, x, 0] = (byte)Math.Max(0, Math.Min(255, normalized));
}
}
return result;
}
}
@@ -0,0 +1,212 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File: HierarchicalEnhancementProcessor.cs
// Description: Hierarchical enhancement processor based on multi-scale Gaussian
//              difference decomposition with independent per-band gain control.
// Features:
//   - Decomposes the image into multiple detail bands plus a base layer
//   - Independent gain control for each detail band
//   - Base-layer brightness adjustment and clip limiting
// Algorithm: multi-scale Gaussian difference decomposition and recomposition
// Author: Li Wei  wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Hierarchical enhancement processor: multi-scale Gaussian difference
/// decomposition with independent gain control per frequency band.
/// </summary>
public class HierarchicalEnhancementProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<HierarchicalEnhancementProcessor>();
/// <summary>
/// Initializes the localized name and description of the processor.
/// </summary>
public HierarchicalEnhancementProcessor()
{
Name = LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Name");
Description = LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Description");
}
/// <summary>
/// Registers the decomposition level count, the per-band detail gains,
/// the base-layer gain and the clip limit parameters.
/// </summary>
protected override void InitializeParameters()
{
// Number of Gaussian decomposition levels (detail bands), 2..8.
Parameters.Add("Levels", new ProcessorParameter(
"Levels",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Levels"),
typeof(int),
4,
2,
8,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_Levels_Desc")));
// Gain applied to the finest detail band.
Parameters.Add("FineGain", new ProcessorParameter(
"FineGain",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_FineGain"),
typeof(double),
2.0,
0.0,
10.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_FineGain_Desc")));
// Gain applied to the mid-frequency bands.
Parameters.Add("MediumGain", new ProcessorParameter(
"MediumGain",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_MediumGain"),
typeof(double),
1.5,
0.0,
10.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_MediumGain_Desc")));
// Gain applied to the coarsest detail band.
Parameters.Add("CoarseGain", new ProcessorParameter(
"CoarseGain",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_CoarseGain"),
typeof(double),
1.0,
0.0,
10.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_CoarseGain_Desc")));
// Gain applied to the base (low-frequency) layer during recomposition.
Parameters.Add("BaseGain", new ProcessorParameter(
"BaseGain",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_BaseGain"),
typeof(double),
1.0,
0.0,
3.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_BaseGain_Desc")));
// Clip limit; presumably 0 disables clipping — TODO confirm against the
// Process implementation (not fully visible in this view).
Parameters.Add("ClipLimit", new ProcessorParameter(
"ClipLimit",
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_ClipLimit"),
typeof(double),
0.0,
0.0,
50.0,
LocalizationHelper.GetString("HierarchicalEnhancementProcessor_ClipLimit_Desc")));
_logger.Debug("InitializeParameters");
}
/// <summary>
/// Multi-scale (Laplacian-pyramid-like) detail enhancement: decomposes the image
/// into detail layers via successive Gaussian smoothing, re-weights each layer,
/// optionally clips detail amplitude, and recombines into an 8-bit result.
/// </summary>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
    int levels = GetParameter<int>("Levels");
    double fineGain = GetParameter<double>("FineGain");
    double mediumGain = GetParameter<double>("MediumGain");
    double coarseGain = GetParameter<double>("CoarseGain");
    double baseGain = GetParameter<double>("BaseGain");
    double clipLimit = GetParameter<double>("ClipLimit");
    _logger.Debug("Process: Levels={Levels}, Fine={Fine}, Medium={Medium}, Coarse={Coarse}, Base={Base}, Clip={Clip}",
    levels, fineGain, mediumGain, coarseGain, baseGain, clipLimit);
    int h = inputImage.Height;
    int w = inputImage.Width;
    // === Multi-scale decomposition ===
    // Build a smoothing stack with increasing sigma: G0 (original), G1, ..., G_n (base layer).
    // Detail layer:    D_i = G_i - G_{i+1}
    // Reconstruction:  output = sum(D_i * gain_i) + G_n * baseGain
    // Per-level Gaussian sigma (doubles each level).
    var sigmas = new double[levels];
    for (int i = 0; i < levels; i++)
    sigmas[i] = Math.Pow(2, i + 1); // 2, 4, 8, 16, ...
    // Smoothing layers kept as flat float arrays (avoids Emgu float Image overhead).
    var smoothLayers = new float[levels + 1][]; // [0]=original, [1..n]=progressively smoother
    smoothLayers[0] = new float[h * w];
    var srcData = inputImage.Data;
    Parallel.For(0, h, y =>
    {
    int row = y * w;
    for (int x = 0; x < w; x++)
    smoothLayers[0][row + x] = srcData[y, x, 0];
    });
    for (int i = 0; i < levels; i++)
    {
    int ksize = ((int)(sigmas[i] * 3)) | 1; // force odd kernel size
    if (ksize < 3) ksize = 3;
    using var src = new Image<Gray, byte>(w, h);
    // Convert the previous float layer back to bytes for GaussianBlur.
    // NOTE(review): this re-quantizes every level to 8 bits — confirm the
    // precision loss is acceptable for the intended inputs.
    var prevLayer = smoothLayers[i];
    var sd = src.Data;
    Parallel.For(0, h, y =>
    {
    int row = y * w;
    for (int x = 0; x < w; x++)
    sd[y, x, 0] = (byte)Math.Clamp((int)Math.Round(prevLayer[row + x]), 0, 255);
    });
    using var dst = new Image<Gray, byte>(w, h);
    CvInvoke.GaussianBlur(src, dst, new System.Drawing.Size(ksize, ksize), sigmas[i]);
    smoothLayers[i + 1] = new float[h * w];
    var dd = dst.Data;
    var nextLayer = smoothLayers[i + 1];
    Parallel.For(0, h, y =>
    {
    int row = y * w;
    for (int x = 0; x < w; x++)
    nextLayer[row + x] = dd[y, x, 0];
    });
    }
    // === Per-layer gains: piecewise-linear interpolation fine -> medium -> coarse ===
    var gains = new double[levels];
    for (int i = 0; i < levels; i++)
    {
    double t = levels <= 1 ? 0.0 : (double)i / (levels - 1);
    if (t <= 0.5)
    {
    double t2 = t * 2.0;
    gains[i] = fineGain * (1.0 - t2) + mediumGain * t2;
    }
    else
    {
    double t2 = (t - 0.5) * 2.0;
    gains[i] = mediumGain * (1.0 - t2) + coarseGain * t2;
    }
    }
    // Reconstruction: output = baseGain * G_n + sum(gain_i * (G_i - G_{i+1}))
    float fBaseGain = (float)baseGain;
    float fClip = (float)clipLimit;
    var baseLayerData = smoothLayers[levels];
    var result = new Image<Gray, byte>(w, h);
    var resultData = result.Data;
    // Pre-convert gains to float for the inner loop.
    var fGains = new float[levels];
    for (int i = 0; i < levels; i++)
    fGains[i] = (float)gains[i];
    Parallel.For(0, h, y =>
    {
    int row = y * w;
    for (int x = 0; x < w; x++)
    {
    int idx = row + x;
    float val = baseLayerData[idx] * fBaseGain;
    for (int i = 0; i < levels; i++)
    {
    float detail = smoothLayers[i][idx] - smoothLayers[i + 1][idx];
    detail *= fGains[i];
    if (fClip > 0)
    detail = Math.Clamp(detail, -fClip, fClip);
    val += detail;
    }
    resultData[y, x, 0] = (byte)Math.Clamp((int)Math.Round(val), 0, 255);
    }
    });
    _logger.Debug("Process completed: {Levels} levels, output={W}x{H}", levels, w, h);
    return result;
}
}
@@ -0,0 +1,142 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        HistogramEqualizationProcessor.cs
// Description: Histogram equalization processor for enhancing image contrast
// Features:
//   - Global histogram equalization
//   - Adaptive histogram equalization (CLAHE)
//   - Contrast-limited enhancement
//   - Improves overall image contrast
// Algorithm: histogram equalization, CLAHE
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Histogram equalization processor: enhances image contrast using either
/// global equalization or CLAHE (Contrast Limited Adaptive Histogram Equalization).
/// </summary>
public class HistogramEqualizationProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<HistogramEqualizationProcessor>();

    public HistogramEqualizationProcessor()
    {
        Name = LocalizationHelper.GetString("HistogramEqualizationProcessor_Name");
        Description = LocalizationHelper.GetString("HistogramEqualizationProcessor_Description");
    }

    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("HistogramEqualizationProcessor_Method"),
            typeof(string),
            "Global",
            null,
            null,
            LocalizationHelper.GetString("HistogramEqualizationProcessor_Method_Desc"),
            new string[] { "Global", "CLAHE" }));
        Parameters.Add("ClipLimit", new ProcessorParameter(
            "ClipLimit",
            LocalizationHelper.GetString("HistogramEqualizationProcessor_ClipLimit"),
            typeof(double),
            2.0,
            1.0,
            10.0,
            LocalizationHelper.GetString("HistogramEqualizationProcessor_ClipLimit_Desc")));
        Parameters.Add("TileSize", new ProcessorParameter(
            "TileSize",
            LocalizationHelper.GetString("HistogramEqualizationProcessor_TileSize"),
            typeof(int),
            8,
            4,
            32,
            LocalizationHelper.GetString("HistogramEqualizationProcessor_TileSize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Equalizes the histogram of <paramref name="inputImage"/> using the
    /// configured method ("Global" or "CLAHE") and returns a new image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double clipLimit = GetParameter<double>("ClipLimit");
        int tileSize = GetParameter<int>("TileSize");
        Image<Gray, byte> result;
        if (method == "CLAHE")
        {
            result = ApplyCLAHE(inputImage, clipLimit, tileSize);
        }
        else // Global
        {
            result = new Image<Gray, byte>(inputImage.Size);
            CvInvoke.EqualizeHist(inputImage, result);
        }
        _logger.Debug("Process: Method = {Method}, ClipLimit = {ClipLimit}, TileSize = {TileSize}",
            method, clipLimit, tileSize);
        return result;
    }

    /// <summary>
    /// Contrast Limited Adaptive Histogram Equalization via OpenCV.
    /// </summary>
    /// <param name="inputImage">Source grayscale image (not modified).</param>
    /// <param name="clipLimit">Contrast clip limit passed straight to OpenCV.</param>
    /// <param name="tileSize">Approximate tile edge length in pixels; converted
    /// to a tile-grid size for OpenCV's CLAHE.</param>
    /// <returns>New equalized image; caller owns it.</returns>
    private Image<Gray, byte> ApplyCLAHE(Image<Gray, byte> inputImage, double clipLimit, int tileSize)
    {
        // Review fix: the previous implementation equalized each tile
        // independently and blended, which produces blocking artifacts, leaked
        // several unmanaged intermediates, and temporarily mutated
        // inputImage.ROI (a side effect on the caller's image, unsafe under
        // concurrency). OpenCV's CLAHE does real per-tile histograms with a
        // clip limit and bilinear interpolation between tiles.
        int tilesX = Math.Max(1, (inputImage.Width + tileSize - 1) / tileSize);
        int tilesY = Math.Max(1, (inputImage.Height + tileSize - 1) / tileSize);
        var result = new Image<Gray, byte>(inputImage.Size);
        CvInvoke.CLAHE(inputImage, clipLimit, new System.Drawing.Size(tilesX, tilesY), result);
        return result;
    }
}
@@ -0,0 +1,267 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        HistogramOverlayProcessor.cs
// Description: Histogram overlay processor — computes the grey-level histogram
//              and summary statistics and draws a chart onto the image
// Features:
//   - Computes the grey-level histogram and statistics (min/max/mean/median/mode/std-dev)
//   - Renders a translucent histogram chart with axes, ticks and grid lines
//   - Exports a tab-separated histogram table via OutputData
// Algorithm: grey-level histogram + pseudo-color overlay rendering
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using System.Text;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Histogram overlay processor: computes the grey-level histogram and summary
/// statistics of the input, renders a translucent histogram chart (axes, ticks,
/// grid) onto a colour copy exposed via OutputData["PseudoColorImage"], and
/// returns the grey input unchanged (as a clone).
/// </summary>
public class HistogramOverlayProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<HistogramOverlayProcessor>();
    // Chart layout constants (pixels unless noted).
    private const int ChartWidth = 256; // plot area width — one column per grey level
    private const int ChartHeight = 200; // plot area height
    private const int AxisMarginLeft = 50; // room for Y-axis tick labels
    private const int AxisMarginBottom = 25; // room for X-axis tick labels
    private const int Padding = 8; // inner padding of the chart background
    private const int PaddingRight = 25; // extra right padding so the last X label fits
    private const int Margin = 10; // chart offset from the image's top-left corner
    private const float BgAlpha = 0.6f; // opacity used to darken the chart background
    private const double FontScale = 0.35;
    private const int FontThickness = 1;
    public HistogramOverlayProcessor()
    {
        Name = LocalizationHelper.GetString("HistogramOverlayProcessor_Name");
        Description = LocalizationHelper.GetString("HistogramOverlayProcessor_Description");
    }
    protected override void InitializeParameters()
    {
        // No configurable parameters.
    }
    /// <summary>
    /// Computes histogram + statistics, stores a text report in
    /// OutputData["HistogramTable"], the raw bins in OutputData["Histogram"],
    /// the rendered overlay in OutputData["PseudoColorImage"], and returns a
    /// clone of the (unmodified) input image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        int h = inputImage.Height;
        int w = inputImage.Width;
        var srcData = inputImage.Data;
        // === 1. Grey-level histogram ===
        var hist = new int[256];
        for (int y = 0; y < h; y++)
            for (int x = 0; x < w; x++)
                hist[srcData[y, x, 0]]++;
        int maxCount = 0;
        long totalPixels = (long)h * w;
        for (int i = 0; i < 256; i++)
            if (hist[i] > maxCount) maxCount = hist[i];
        // === 2. Summary statistics (min/max/mean/median/mode/std-dev) ===
        double mean = 0, variance = 0;
        int minVal = 255, maxVal = 0;
        int modeVal = 0, modeCount = 0;
        long medianTarget = totalPixels / 2, cumulative = 0;
        int medianVal = 0;
        bool medianFound = false;
        for (int i = 0; i < 256; i++)
        {
            if (hist[i] > 0)
            {
                if (i < minVal) minVal = i;
                if (i > maxVal) maxVal = i;
            }
            if (hist[i] > modeCount) { modeCount = hist[i]; modeVal = i; }
            mean += (double)i * hist[i];
            cumulative += hist[i];
            if (!medianFound && cumulative >= medianTarget) { medianVal = i; medianFound = true; }
        }
        mean /= totalPixels;
        for (int i = 0; i < 256; i++)
            variance += hist[i] * (i - mean) * (i - mean);
        variance /= totalPixels;
        double stdDev = Math.Sqrt(variance);
        // === 3. Build the text report ===
        // NOTE(review): the literals below are mojibake from a broken source
        // encoding; left byte-identical here because they are runtime output —
        // fix the encoding in a dedicated change.
        var sb = new StringBuilder();
        sb.AppendLine("=== 啣漲湔䲮霈?===");
        sb.AppendLine($"撠箏站: {w} x {h}");
        sb.AppendLine($"蝝䭾㺭: {totalPixels}");
        sb.AppendLine($"撠讐摨? {minVal}");
        sb.AppendLine($"憭抒摨? {maxVal}");
        sb.AppendLine($"撟喳啣漲: {mean:F2}");
        sb.AppendLine($"銝凋啣漲: {medianVal}");
        sb.AppendLine($"隡埈㺭啣漲: {modeVal} (箇緵 {modeCount} 甈?");
        sb.AppendLine($"撌? {stdDev:F2}");
        sb.AppendLine();
        sb.AppendLine("啣漲墦t豹t(%)");
        for (int i = 0; i < 256; i++)
        {
            if (hist[i] > 0)
                sb.AppendLine($"{i}\t{hist[i]}\t{(double)hist[i] / totalPixels * 100.0:F4}");
        }
        OutputData["HistogramTable"] = sb.ToString();
        OutputData["Histogram"] = hist;
        // === 4. Darken the chart background and draw the histogram bars ===
        var colorImage = inputImage.Convert<Bgr, byte>();
        var colorData = colorImage.Data;
        // Total chart size: left padding + Y-axis margin + plot + right padding (horizontal);
        //                   top padding + plot + X-axis margin + bottom padding (vertical).
        int totalW = Padding + AxisMarginLeft + ChartWidth + PaddingRight;
        int totalH = Padding + ChartHeight + AxisMarginBottom + Padding;
        int bgW = Math.Min(totalW, w - Margin);
        int bgH = Math.Min(totalH, h - Margin);
        if (bgW > Padding + AxisMarginLeft && bgH > Padding + AxisMarginBottom)
        {
            int plotW = Math.Min(ChartWidth, bgW - Padding - AxisMarginLeft - PaddingRight);
            int plotH = Math.Min(ChartHeight, bgH - Padding - AxisMarginBottom - Padding);
            if (plotW <= 0 || plotH <= 0) goto SkipOverlay; // image too small for the chart
            // Top-left corner of the plot area in image coordinates.
            int plotX0 = Margin + Padding + AxisMarginLeft;
            int plotY0 = Margin + Padding;
            // Bar height per plot column.
            double binWidth = (double)plotW / 256.0;
            var barHeights = new int[plotW];
            for (int px = 0; px < plotW; px++)
            {
                int bin = Math.Min((int)(px / binWidth), 255);
                barHeights[px] = maxCount > 0 ? (int)((long)hist[bin] * (plotH - 1) / maxCount) : 0;
            }
            float alpha = BgAlpha;
            float inv = 1.0f - alpha;
            // Darken the whole chart rectangle so the overlay stays readable.
            Parallel.For(0, bgH, dy =>
            {
                int imgY = Margin + dy;
                if (imgY >= h) return;
                for (int dx = 0; dx < bgW; dx++)
                {
                    int imgX = Margin + dx;
                    if (imgX >= w) break;
                    colorData[imgY, imgX, 0] = (byte)(int)(colorData[imgY, imgX, 0] * inv);
                    colorData[imgY, imgX, 1] = (byte)(int)(colorData[imgY, imgX, 1] * inv);
                    colorData[imgY, imgX, 2] = (byte)(int)(colorData[imgY, imgX, 2] * inv);
                }
            });
            // Draw the histogram bars bottom-up (blue-tinted: strong B, weak G/R).
            Parallel.For(0, plotH, dy =>
            {
                int imgY = plotY0 + dy;
                if (imgY >= h) return;
                int rowFromBottom = plotH - 1 - dy;
                for (int dx = 0; dx < plotW; dx++)
                {
                    int imgX = plotX0 + dx;
                    if (imgX >= w) break;
                    if (rowFromBottom < barHeights[dx])
                    {
                        byte curB = colorData[imgY, imgX, 0];
                        byte curG = colorData[imgY, imgX, 1];
                        byte curR = colorData[imgY, imgX, 2];
                        colorData[imgY, imgX, 0] = (byte)Math.Clamp(curB + (int)(255 * alpha), 0, 255);
                        colorData[imgY, imgX, 1] = (byte)Math.Clamp(curG + (int)(50 * alpha), 0, 255);
                        colorData[imgY, imgX, 2] = (byte)Math.Clamp(curR + (int)(50 * alpha), 0, 255);
                    }
                }
            });
            // === 5. Axes, tick marks, grid lines and labels ===
            var white = new MCvScalar(255, 255, 255);
            var gray = new MCvScalar(180, 180, 180);
            // Y axis
            CvInvoke.Line(colorImage,
                new Point(plotX0, plotY0),
                new Point(plotX0, plotY0 + plotH),
                white, 1);
            // X axis
            CvInvoke.Line(colorImage,
                new Point(plotX0, plotY0 + plotH),
                new Point(plotX0 + plotW, plotY0 + plotH),
                white, 1);
            // X-axis ticks at grey levels 0, 64, 128, 192, 255.
            int[] xTicks = { 0, 64, 128, 192, 255 };
            foreach (int tick in xTicks)
            {
                int tx = plotX0 + (int)(tick * binWidth);
                if (tx >= w) break;
                CvInvoke.Line(colorImage,
                    new Point(tx, plotY0 + plotH),
                    new Point(tx, plotY0 + plotH + 4),
                    white, 1);
                string label = tick.ToString();
                CvInvoke.PutText(colorImage, label,
                    new Point(tx - 8, plotY0 + plotH + 18),
                    FontFace.HersheySimplex, FontScale, white, FontThickness);
            }
            // Y-axis ticks at 0/25/50/75/100% of the peak bin count.
            for (int i = 0; i <= 4; i++)
            {
                int val = maxCount * i / 4;
                int ty = plotY0 + plotH - (int)((long)plotH * i / 4);
                CvInvoke.Line(colorImage,
                    new Point(plotX0 - 4, ty),
                    new Point(plotX0, ty),
                    white, 1);
                // Dashed horizontal grid line (skip the axis itself and the top edge).
                if (i > 0 && i < 4)
                {
                    for (int gx = plotX0 + 2; gx < plotX0 + plotW; gx += 6)
                    {
                        int gxEnd = Math.Min(gx + 2, plotX0 + plotW);
                        CvInvoke.Line(colorImage,
                            new Point(gx, ty),
                            new Point(gxEnd, ty),
                            gray, 1);
                    }
                }
                string label = FormatCount(val);
                CvInvoke.PutText(colorImage, label,
                    new Point(Margin + Padding, ty + 4),
                    FontFace.HersheySimplex, FontScale, white, FontThickness);
            }
        }
        SkipOverlay:
        OutputData["PseudoColorImage"] = colorImage;
        _logger.Debug("Process completed: histogram overlay, mean={Mean:F2}, stdDev={Std:F2}", mean, stdDev);
        return inputImage.Clone();
    }
    /// <summary>
    /// Formats a pixel count compactly, e.g. 12345 -> "12.3K", 3200000 -> "3.2M".
    /// </summary>
    private static string FormatCount(int count)
    {
        if (count >= 1_000_000) return $"{count / 1_000_000.0:F1}M";
        if (count >= 1_000) return $"{count / 1_000.0:F1}K";
        return count.ToString();
    }
}
@@ -0,0 +1,320 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        RetinexProcessor.cs
// Description: Retinex-based multi-scale shadow correction processor
// Features:
//   - Single-scale Retinex (SSR)
//   - Multi-scale Retinex (MSR)
//   - Multi-scale Retinex with color restoration (MSRCR)
//   - Non-uniform illumination correction
//   - Shadow removal
// Algorithm: Retinex theory — decomposes the image into reflectance and illumination components
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Retinex-based multi-scale shadow/illumination correction processor.
/// Decomposes the image into reflectance and illumination components and
/// outputs a min-max normalized reflectance estimate (SSR / MSR / MSRCR).
/// </summary>
public class RetinexProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<RetinexProcessor>();

    public RetinexProcessor()
    {
        Name = LocalizationHelper.GetString("RetinexProcessor_Name");
        Description = LocalizationHelper.GetString("RetinexProcessor_Description");
    }

    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("RetinexProcessor_Method"),
            typeof(string),
            "MSR",
            null,
            null,
            LocalizationHelper.GetString("RetinexProcessor_Method_Desc"),
            new string[] { "SSR", "MSR", "MSRCR" }));
        Parameters.Add("Sigma1", new ProcessorParameter(
            "Sigma1",
            LocalizationHelper.GetString("RetinexProcessor_Sigma1"),
            typeof(double),
            15.0,
            1.0,
            100.0,
            LocalizationHelper.GetString("RetinexProcessor_Sigma1_Desc")));
        Parameters.Add("Sigma2", new ProcessorParameter(
            "Sigma2",
            LocalizationHelper.GetString("RetinexProcessor_Sigma2"),
            typeof(double),
            80.0,
            1.0,
            200.0,
            LocalizationHelper.GetString("RetinexProcessor_Sigma2_Desc")));
        Parameters.Add("Sigma3", new ProcessorParameter(
            "Sigma3",
            LocalizationHelper.GetString("RetinexProcessor_Sigma3"),
            typeof(double),
            250.0,
            1.0,
            500.0,
            LocalizationHelper.GetString("RetinexProcessor_Sigma3_Desc")));
        Parameters.Add("Gain", new ProcessorParameter(
            "Gain",
            LocalizationHelper.GetString("RetinexProcessor_Gain"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("RetinexProcessor_Gain_Desc")));
        Parameters.Add("Offset", new ProcessorParameter(
            "Offset",
            LocalizationHelper.GetString("RetinexProcessor_Offset"),
            typeof(int),
            0,
            -100,
            100,
            LocalizationHelper.GetString("RetinexProcessor_Offset_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Dispatches to SSR (uses Sigma2 only), MSR, or MSRCR according to the
    /// "Method" parameter and returns a new result image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double sigma1 = GetParameter<double>("Sigma1");
        double sigma2 = GetParameter<double>("Sigma2");
        double sigma3 = GetParameter<double>("Sigma3");
        double gain = GetParameter<double>("Gain");
        int offset = GetParameter<int>("Offset");
        Image<Gray, byte> result;
        if (method == "SSR")
        {
            // Single-scale Retinex uses the middle sigma.
            result = SingleScaleRetinex(inputImage, sigma2, gain, offset);
        }
        else if (method == "MSR")
        {
            result = MultiScaleRetinex(inputImage, new[] { sigma1, sigma2, sigma3 }, gain, offset);
        }
        else // MSRCR
        {
            result = MultiScaleRetinexCR(inputImage, new[] { sigma1, sigma2, sigma3 }, gain, offset);
        }
        _logger.Debug("Process: Method = {Method}, Sigma1 = {Sigma1}, Sigma2 = {Sigma2}, Sigma3 = {Sigma3}, Gain = {Gain}, Offset = {Offset}",
            method, sigma1, sigma2, sigma3, gain, offset);
        return result;
    }

    /// <summary>
    /// Single-scale Retinex (SSR): R = log(I) - log(G_sigma * I), then gain and
    /// offset, then min-max normalization to 0-255.
    /// </summary>
    private Image<Gray, byte> SingleScaleRetinex(Image<Gray, byte> inputImage, double sigma, double gain, int offset)
    {
        // Review fix: the original leaked the image reassigned by
        // `floatImage = floatImage + 1.0f` and every unnamed operator temporary
        // (Emgu images wrap unmanaged memory) — all intermediates are now
        // disposed. The +1 shift avoids log(0).
        using var floatImage = inputImage.Convert<Gray, float>();
        using var shifted = floatImage + 1.0f;
        using var logImage = ElementwiseLog(shifted);
        using var blurred = new Image<Gray, float>(inputImage.Size);
        int kernelSize = OddKernelSize(sigma);
        CvInvoke.GaussianBlur(shifted, blurred, new System.Drawing.Size(kernelSize, kernelSize), sigma);
        using var logBlurred = ElementwiseLog(blurred);
        using var retinex = logImage - logBlurred;
        using var gained = retinex * gain;
        using var scaled = gained + offset;
        return NormalizeToByteImage(scaled);
    }

    /// <summary>
    /// Multi-scale Retinex (MSR): average of SSR responses over several sigmas.
    /// </summary>
    /// <exception cref="ArgumentException">No sigmas were supplied.</exception>
    private Image<Gray, byte> MultiScaleRetinex(Image<Gray, byte> inputImage, double[] sigmas, double gain, int offset)
    {
        if (sigmas == null || sigmas.Length == 0)
            throw new ArgumentException("At least one sigma is required.", nameof(sigmas));
        using var floatImage = inputImage.Convert<Gray, float>();
        using var shifted = floatImage + 1.0f; // avoid log(0)
        using var logImage = ElementwiseLog(shifted);
        using var accum = new Image<Gray, float>(inputImage.Size);
        accum.SetZero();
        foreach (double sigma in sigmas)
        {
            using var blurred = new Image<Gray, float>(inputImage.Size);
            int kernelSize = OddKernelSize(sigma);
            CvInvoke.GaussianBlur(shifted, blurred, new System.Drawing.Size(kernelSize, kernelSize), sigma);
            using var logBlurred = ElementwiseLog(blurred);
            using var ssr = logImage - logBlurred;
            // In-place accumulation — the original `msrResult = msrResult + ...`
            // leaked the previous accumulator every iteration.
            CvInvoke.Add(accum, ssr, accum);
        }
        using var averaged = accum / sigmas.Length;
        using var gained = averaged * gain;
        using var scaled = gained + offset;
        return NormalizeToByteImage(scaled);
    }

    /// <summary>
    /// MSRCR: MSR followed by a simplified (grayscale) color-restoration factor
    /// c = log(I + 1) / log(128) applied per pixel.
    /// </summary>
    private Image<Gray, byte> MultiScaleRetinexCR(Image<Gray, byte> inputImage, double[] sigmas, double gain, int offset)
    {
        using var msrResult = MultiScaleRetinex(inputImage, sigmas, gain, offset);
        using var floatMsr = msrResult.Convert<Gray, float>();
        using var floatInput = inputImage.Convert<Gray, float>();
        using var enhanced = new Image<Gray, float>(inputImage.Size);
        float logDenominator = (float)Math.Log(128.0);
        var msrData = floatMsr.Data;
        var inputData = floatInput.Data;
        var outData = enhanced.Data;
        for (int y = 0; y < inputImage.Height; y++)
        {
            for (int x = 0; x < inputImage.Width; x++)
            {
                float c = (float)Math.Log(inputData[y, x, 0] + 1.0) / logDenominator;
                outData[y, x, 0] = msrData[y, x, 0] * c;
            }
        }
        return NormalizeToByteImage(enhanced);
    }

    /// <summary>
    /// Element-wise natural log via OpenCV (replaces the original per-pixel
    /// managed loops). Caller owns — and must dispose — the returned image.
    /// </summary>
    private static Image<Gray, float> ElementwiseLog(Image<Gray, float> src)
    {
        var dst = new Image<Gray, float>(src.Size);
        CvInvoke.Log(src, dst);
        return dst;
    }

    /// <summary>
    /// Gaussian kernel size of roughly 6*sigma, forced odd and at least 3.
    /// </summary>
    private static int OddKernelSize(double sigma)
    {
        int k = (int)(sigma * 6) | 1;
        return k < 3 ? 3 : k;
    }

    /// <summary>
    /// Min-max normalizes a float image to the 0-255 byte range.
    /// A constant image (range == 0) maps to all zeros, matching the original behavior.
    /// </summary>
    private Image<Gray, byte> NormalizeToByteImage(Image<Gray, float> floatImage)
    {
        double minVal = double.MaxValue;
        double maxVal = double.MinValue;
        var data = floatImage.Data;
        int height = floatImage.Height;
        int width = floatImage.Width;
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                float val = data[y, x, 0];
                if (val < minVal) minVal = val;
                if (val > maxVal) maxVal = val;
            }
        }
        var result = new Image<Gray, byte>(floatImage.Size);
        double range = maxVal - minVal;
        if (range > 0)
        {
            var resultData = result.Data;
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    int normalized = (int)((data[y, x, 0] - minVal) / range * 255.0);
                    resultData[y, x, 0] = (byte)Math.Max(0, Math.Min(255, normalized));
                }
            }
        }
        return result;
    }
}
@@ -0,0 +1,141 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        SharpenProcessor.cs
// Description: Sharpening processor for enhancing image edges and detail
// Features:
//   - Laplacian sharpening
//   - Unsharp masking
//   - Adjustable sharpening strength
//   - Multiple kernel sizes supported
// Algorithm: Laplacian operator, unsharp masking
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Sharpening processor: enhances edges and fine detail using either
/// Laplacian sharpening or unsharp masking.
/// </summary>
public class SharpenProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SharpenProcessor>();

    public SharpenProcessor()
    {
        Name = LocalizationHelper.GetString("SharpenProcessor_Name");
        Description = LocalizationHelper.GetString("SharpenProcessor_Description");
    }

    protected override void InitializeParameters()
    {
        Parameters.Add("Method", new ProcessorParameter(
            "Method",
            LocalizationHelper.GetString("SharpenProcessor_Method"),
            typeof(string),
            "Laplacian",
            null,
            null,
            LocalizationHelper.GetString("SharpenProcessor_Method_Desc"),
            new string[] { "Laplacian", "UnsharpMask" }));
        Parameters.Add("Strength", new ProcessorParameter(
            "Strength",
            LocalizationHelper.GetString("SharpenProcessor_Strength"),
            typeof(double),
            1.0,
            0.1,
            5.0,
            LocalizationHelper.GetString("SharpenProcessor_Strength_Desc")));
        Parameters.Add("KernelSize", new ProcessorParameter(
            "KernelSize",
            LocalizationHelper.GetString("SharpenProcessor_KernelSize"),
            typeof(int),
            3,
            1,
            15,
            LocalizationHelper.GetString("SharpenProcessor_KernelSize_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Sharpens the input using the configured method and returns a new image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        string method = GetParameter<string>("Method");
        double strength = GetParameter<double>("Strength");
        int kernelSize = GetParameter<int>("KernelSize");
        if (kernelSize % 2 == 0) kernelSize++; // GaussianBlur requires an odd kernel size
        Image<Gray, byte> result;
        if (method == "UnsharpMask")
        {
            result = ApplyUnsharpMask(inputImage, kernelSize, strength);
        }
        else // Laplacian
        {
            result = ApplyLaplacianSharpening(inputImage, strength);
        }
        _logger.Debug("Process: Method = {Method}, Strength = {Strength}, KernelSize = {KernelSize}",
            method, strength, kernelSize);
        return result;
    }

    /// <summary>
    /// Laplacian sharpening: output = I + strength * Laplacian(I).
    /// Convert back to byte saturates values into [0, 255].
    /// </summary>
    private Image<Gray, byte> ApplyLaplacianSharpening(Image<Gray, byte> inputImage, double strength)
    {
        // Review fixes: removed the unused byte copy of the Laplacian the
        // original allocated, and disposed the unnamed `laplacian * strength`
        // temporary it leaked (Emgu images hold unmanaged memory).
        using var laplacian = new Image<Gray, float>(inputImage.Size);
        CvInvoke.Laplacian(inputImage, laplacian, DepthType.Cv32F, 1);
        using var floatImage = inputImage.Convert<Gray, float>();
        using var weightedLaplacian = laplacian * strength;
        using var sharpened = floatImage + weightedLaplacian;
        return sharpened.Convert<Gray, byte>();
    }

    /// <summary>
    /// Unsharp masking: output = I + strength * (I - GaussianBlur(I)).
    /// </summary>
    private Image<Gray, byte> ApplyUnsharpMask(Image<Gray, byte> inputImage, int kernelSize, double strength)
    {
        using var blurred = new Image<Gray, byte>(inputImage.Size);
        CvInvoke.GaussianBlur(inputImage, blurred,
            new System.Drawing.Size(kernelSize, kernelSize), 0);
        using var floatInput = inputImage.Convert<Gray, float>();
        using var floatBlurred = blurred.Convert<Gray, float>();
        using var detail = floatInput - floatBlurred;
        using var weightedDetail = detail * strength;
        using var sharpened = floatInput + weightedDetail;
        return sharpened.Convert<Gray, byte>();
    }
}
@@ -0,0 +1,127 @@
// ============================================================================
// Copyright © 2016-2025 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        SubPixelZoomProcessor.cs
// Description: Sub-pixel zoom processor — high-quality interpolated upscaling
// Features:
//   - Arbitrary zoom factors (including fractional, e.g. 1.5x, 2.3x)
//   - Multiple interpolation methods (nearest, bilinear, bicubic, Lanczos)
//   - Optional sharpening compensation for interpolation blur
//   - Optional explicit output size
// Algorithm: high-quality interpolated resize based on OpenCV Resize
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Serilog;
using System.Drawing;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// Sub-pixel zoom processor: high-quality interpolated upscaling with an
/// optional unsharp-mask pass to compensate for interpolation blur.
/// </summary>
public class SubPixelZoomProcessor : ImageProcessorBase
{
    private static readonly ILogger _logger = Log.ForContext<SubPixelZoomProcessor>();

    public SubPixelZoomProcessor()
    {
        Name = LocalizationHelper.GetString("SubPixelZoomProcessor_Name");
        Description = LocalizationHelper.GetString("SubPixelZoomProcessor_Description");
    }

    protected override void InitializeParameters()
    {
        Parameters.Add("ScaleFactor", new ProcessorParameter(
            "ScaleFactor",
            LocalizationHelper.GetString("SubPixelZoomProcessor_ScaleFactor"),
            typeof(double),
            2.0,
            1.0,
            16.0,
            LocalizationHelper.GetString("SubPixelZoomProcessor_ScaleFactor_Desc")));
        Parameters.Add("Interpolation", new ProcessorParameter(
            "Interpolation",
            LocalizationHelper.GetString("SubPixelZoomProcessor_Interpolation"),
            typeof(string),
            "Lanczos",
            null,
            null,
            LocalizationHelper.GetString("SubPixelZoomProcessor_Interpolation_Desc"),
            new string[] { "Nearest", "Bilinear", "Bicubic", "Lanczos" }));
        Parameters.Add("SharpenAfter", new ProcessorParameter(
            "SharpenAfter",
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenAfter"),
            typeof(bool),
            false,
            null,
            null,
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenAfter_Desc")));
        Parameters.Add("SharpenStrength", new ProcessorParameter(
            "SharpenStrength",
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenStrength"),
            typeof(double),
            0.5,
            0.1,
            3.0,
            LocalizationHelper.GetString("SubPixelZoomProcessor_SharpenStrength_Desc")));
        _logger.Debug("InitializeParameters");
    }

    /// <summary>
    /// Resizes the input by ScaleFactor using the selected interpolation,
    /// optionally applies unsharp masking, and returns the new image.
    /// </summary>
    public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
    {
        double scaleFactor = GetParameter<double>("ScaleFactor");
        string interpolation = GetParameter<string>("Interpolation");
        bool sharpenAfter = GetParameter<bool>("SharpenAfter");
        double sharpenStrength = GetParameter<double>("SharpenStrength");
        // Map the parameter string onto the OpenCV interpolation flag.
        Inter interMethod = interpolation switch
        {
            "Nearest" => Inter.Nearest,
            "Bilinear" => Inter.Linear,
            "Bicubic" => Inter.Cubic,
            _ => Inter.Lanczos4
        };
        // Clamp to at least 1x1 output.
        int newWidth = Math.Max(1, (int)Math.Round(inputImage.Width * scaleFactor));
        int newHeight = Math.Max(1, (int)Math.Round(inputImage.Height * scaleFactor));
        var result = new Image<Gray, byte>(newWidth, newHeight);
        CvInvoke.Resize(inputImage, result, new Size(newWidth, newHeight), 0, 0, interMethod);
        if (sharpenAfter)
        {
            // Unsharp masking: result = result + strength * (result - blur).
            // The kernel grows with the zoom factor; `| 1` forces an odd size.
            int ksize = Math.Max(3, (int)(scaleFactor * 2) | 1);
            using var blurred = result.SmoothGaussian(ksize);
            // Review fixes: hoist the Data arrays out of the loop (per-pixel
            // property access is expensive) and round instead of truncating to
            // avoid a systematic downward bias.
            var resData = result.Data;
            var blurData = blurred.Data;
            for (int y = 0; y < newHeight; y++)
            {
                for (int x = 0; x < newWidth; x++)
                {
                    float val = resData[y, x, 0];
                    float blur = blurData[y, x, 0];
                    float sharpened = val + (float)(sharpenStrength * (val - blur));
                    resData[y, x, 0] = (byte)Math.Clamp((int)Math.Round(sharpened), 0, 255);
                }
            }
        }
        _logger.Debug("Process: Scale={Scale}, Interp={Interp}, Size={W}x{H}, Sharpen={Sharpen}",
            scaleFactor, interpolation, newWidth, newHeight, sharpenAfter);
        return result;
    }
}
@@ -0,0 +1,319 @@
// ============================================================================
// Copyright © 2026 Hexagon Technology Center GmbH. All Rights Reserved.
// File:        SuperResolutionProcessor.cs
// Description: Deep-learning super-resolution processor
// Features:
//   - EDSR and FSRCNN super-resolution models (ONNX format)
//   - 2x / 3x / 4x upscaling
//   - Grayscale input is converted as required by the model
//   - Model session caching to avoid repeated loading
//   - Inference via Microsoft.ML.OnnxRuntime
// Algorithm: EDSR (Enhanced Deep Residual SR) / FSRCNN (Fast SR CNN)
// Author: Wei Li wei.lw.li@hexagon.com
// ============================================================================
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;
using Serilog;
using XP.ImageProcessing.Core;
namespace XP.ImageProcessing.Processors;
/// <summary>
/// 瘛勗漲摮虫摮琜EDSR / FSRCNN嚗㚁雿輻鍂 ONNX Runtime
/// </summary>
public class SuperResolutionProcessor : ImageProcessorBase
{
private static readonly ILogger _logger = Log.ForContext<SuperResolutionProcessor>();
// Cached inference session (static: shared across instances to avoid reloading the model).
private static InferenceSession? _cachedSession;
private static string _cachedModelKey = string.Empty;
/// <summary>
/// Initializes the localized display name and description.
/// </summary>
public SuperResolutionProcessor()
{
    Name = LocalizationHelper.GetString("SuperResolutionProcessor_Name");
    Description = LocalizationHelper.GetString("SuperResolutionProcessor_Description");
}
/// <summary>
/// Registers the processor parameters: model architecture and integer upscale factor.
/// </summary>
protected override void InitializeParameters()
{
    var modelChoices = new string[] { "EDSR", "FSRCNN" };
    var scaleChoices = new string[] { "2", "3", "4" };
    Parameters.Add("Model", new ProcessorParameter(
        "Model",
        LocalizationHelper.GetString("SuperResolutionProcessor_Model"),
        typeof(string),
        "FSRCNN",
        null,
        null,
        LocalizationHelper.GetString("SuperResolutionProcessor_Model_Desc"),
        modelChoices));
    Parameters.Add("Scale", new ProcessorParameter(
        "Scale",
        LocalizationHelper.GetString("SuperResolutionProcessor_Scale"),
        typeof(string),
        "2",
        null,
        null,
        LocalizationHelper.GetString("SuperResolutionProcessor_Scale_Desc"),
        scaleChoices));
    _logger.Debug("InitializeParameters");
}
/// <summary>
/// Runs super-resolution inference: locates the ONNX model, (re)uses a cached
/// InferenceSession, and dispatches to tiled or single-pass processing.
/// </summary>
/// <exception cref="FileNotFoundException">The requested ONNX model file is missing.</exception>
public override Image<Gray, byte> Process(Image<Gray, byte> inputImage)
{
    string model = GetParameter<string>("Model");
    int scale = int.Parse(GetParameter<string>("Scale"));
    // Locate the ONNX model file.
    string modelPath = FindModelFile(model, scale);
    if (string.IsNullOrEmpty(modelPath))
    {
        _logger.Error("Model file not found: {Model}_x{Scale}.onnx", model, scale);
        // Review fix: the original message text was mojibake (broken source
        // encoding) and unreadable to users.
        throw new FileNotFoundException(
            $"Super-resolution model not found: {model}_x{scale}.onnx\n" +
            $"Place the model file in one of:\n" +
            $"  1. <application dir>/Models/\n" +
            $"  2. <application dir>/\n" +
            $"The model must be in ONNX format.\n" +
            $"A TensorFlow .pb model can be converted with tf2onnx:\n" +
            $"  pip install tf2onnx\n" +
            $"  python -m tf2onnx.convert --input {model}_x{scale}.pb --output {model}_x{scale}.onnx --inputs input:0 --outputs output:0");
    }
    // Reuse the cached session when the same model/scale is requested again.
    // NOTE(review): the static cache is not synchronized — confirm Process is
    // never invoked concurrently from multiple threads.
    string modelKey = $"{model}_{scale}";
    InferenceSession session;
    if (_cachedModelKey == modelKey && _cachedSession != null)
    {
        session = _cachedSession;
        _logger.Debug("Reusing cached session: {ModelKey}", modelKey);
    }
    else
    {
        _cachedSession?.Dispose();
        var options = new SessionOptions();
        options.GraphOptimizationLevel = GraphOptimizationLevel.ORT_ENABLE_ALL;
        try
        {
            options.AppendExecutionProvider_CUDA(0);
            _logger.Information("Using CUDA GPU for inference");
        }
        catch
        {
            // CUDA provider unavailable (no GPU / missing native libraries): CPU fallback.
            _logger.Warning("CUDA not available, falling back to CPU");
        }
        session = new InferenceSession(modelPath, options);
        _cachedSession = session;
        _cachedModelKey = modelKey;
        // Review fix: the original read session.ModelMetadata into an unused
        // local and logged session.GetType().Name as a fake "Providers" list.
        _logger.Information("Loaded ONNX model: {ModelPath}", modelPath);
    }
    int h = inputImage.Height;
    int w = inputImage.Width;
    _logger.Information("Input image size: {W}x{H}, Model: {Model}, Scale: {Scale}", w, h, model, scale);
    // EDSR is memory-heavy: tile large inputs to bound memory use and avoid OOM.
    const int TileSize = 256;
    bool useTiling = model.StartsWith("EDSR", StringComparison.OrdinalIgnoreCase) && (h > TileSize || w > TileSize);
    if (useTiling)
    {
        return ProcessTiled(session, inputImage, scale, TileSize);
    }
    return ProcessSingle(session, inputImage, scale);
}
/// <summary>
/// 閙活 FSRCNN嚗?
/// </summary>
private Image<Gray, byte> ProcessSingle(InferenceSession session, Image<Gray, byte> inputImage, int scale)
{
int h = inputImage.Height;
int w = inputImage.Width;
// 璅∪颲枏靽⊥
string inputName = session.InputMetadata.Keys.First();
var inputMeta = session.InputMetadata[inputName];
int[] dims = inputMeta.Dimensions;
// dims : [1, H, W, C] (NHWC)嚗龦 ?1 ?3
int inputChannels = dims[^1]; // 𦒘蝏湔糓𡁻?
// 遣颲枏 tensor: [1, H, W, C] (NHWC)
// 雿輻鍂摨訫 + Parallel.For 𣂼蝝删揣撘訫
DenseTensor<float> inputTensor;
if (inputChannels == 1)
{
// FSRCNN: 𡁻啣漲颲枏
inputTensor = new DenseTensor<float>(new[] { 1, h, w, 1 });
float[] buf = inputTensor.Buffer.ToArray();
var imgData = inputImage.Data;
Parallel.For(0, h, y =>
{
int rowOffset = y * w;
for (int x = 0; x < w; x++)
buf[rowOffset + x] = imgData[y, x, 0];
});
inputTensor = new DenseTensor<float>(buf, new[] { 1, h, w, 1 });
}
else
{
// EDSR: 銝厰𡁻 BGR 颲枏
using var colorInput = new Image<Bgr, byte>(w, h);
CvInvoke.CvtColor(inputImage, colorInput, ColorConversion.Gray2Bgr);
var buf = new float[h * w * 3];
var imgData = colorInput.Data;
Parallel.For(0, h, y =>
{
int rowOffset = y * w * 3;
for (int x = 0; x < w; x++)
{
int px = rowOffset + x * 3;
buf[px] = imgData[y, x, 0];
buf[px + 1] = imgData[y, x, 1];
buf[px + 2] = imgData[y, x, 2];
}
});
inputTensor = new DenseTensor<float>(buf, new[] { 1, h, w, 3 });
}
//
var inputs = new List<NamedOnnxValue>
{
NamedOnnxValue.CreateFromTensor(inputName, inputTensor)
};
using var results = session.Run(inputs);
var outputTensor = results.First().AsTensor<float>();
// 颲枏枂 shape: [1, C, H*scale, W*scale] (NCHW嚗峕芋餈?Transpose)
var shape = outputTensor.Dimensions;
int outC = shape[1];
int outH = shape[2];
int outW = shape[3];
// 頧祆揢銝箇摨血㦛?
// 雿輻鍂 Parallel.For + 湔𦻖
Image<Gray, byte> result;
if (outC == 1)
{
// FSRCNN: 𡁻颲枏枂 [1, 1, outH, outW]
result = new Image<Gray, byte>(outW, outH);
var outData = result.Data;
Parallel.For(0, outH, y =>
{
for (int x = 0; x < outW; x++)
outData[y, x, 0] = (byte)Math.Clamp((int)outputTensor[0, 0, y, x], 0, 255);
});
}
else
{
// EDSR: 銝厰𡁻颲枏枂 [1, 3, outH, outW] ?啣漲
// 湔𦻖霈∠啣漲頝唾銝剝𡢿 BGR
result = new Image<Gray, byte>(outW, outH);
var outData = result.Data;
Parallel.For(0, outH, y =>
{
for (int x = 0; x < outW; x++)
{
float b = outputTensor[0, 0, y, x];
float g = outputTensor[0, 1, y, x];
float r = outputTensor[0, 2, y, x];
// BT.601 啣漲: 0.299*R + 0.587*G + 0.114*B
int gray = (int)(0.299f * r + 0.587f * g + 0.114f * b);
outData[y, x, 0] = (byte)Math.Clamp(gray, 0, 255);
}
});
}
_logger.Debug("ProcessSingle: Scale={Scale}, Output={W}x{H}", scale, outW, outH);
return result;
}
/// <summary>
/// ?EDSR嚗㚁𣂼急綫潭𦻖
/// </summary>
private Image<Gray, byte> ProcessTiled(InferenceSession session, Image<Gray, byte> inputImage, int scale, int tileSize)
{
int h = inputImage.Height;
int w = inputImage.Width;
int overlap = 8; // 撠烐𣄽亥器蝻䀝憚敶?
var result = new Image<Gray, byte>(w * scale, h * scale);
int tilesX = (int)Math.Ceiling((double)w / (tileSize - overlap));
int tilesY = (int)Math.Ceiling((double)h / (tileSize - overlap));
_logger.Information("Tiled processing: {TilesX}x{TilesY} tiles, tileSize={TileSize}", tilesX, tilesY, tileSize);
for (int ty = 0; ty < tilesY; ty++)
{
for (int tx = 0; tx < tilesX; tx++)
{
int srcX = Math.Min(tx * (tileSize - overlap), w - tileSize);
int srcY = Math.Min(ty * (tileSize - overlap), h - tileSize);
srcX = Math.Max(srcX, 0);
srcY = Math.Max(srcY, 0);
int tw = Math.Min(tileSize, w - srcX);
int th = Math.Min(tileSize, h - srcY);
// 鋆 tile
inputImage.ROI = new System.Drawing.Rectangle(srcX, srcY, tw, th);
var tile = inputImage.Copy();
inputImage.ROI = System.Drawing.Rectangle.Empty;
// 蓥葵 tile
var srTile = ProcessSingle(session, tile, scale);
tile.Dispose();
// 蝏𤘪
int dstX = srcX * scale;
int dstY = srcY * scale;
result.ROI = new System.Drawing.Rectangle(dstX, dstY, srTile.Width, srTile.Height);
srTile.CopyTo(result);
result.ROI = System.Drawing.Rectangle.Empty;
srTile.Dispose();
}
}
_logger.Debug("ProcessTiled: Scale={Scale}, Output={W}x{H}", scale, result.Width, result.Height);
return result;
}
/// <summary>
/// 交𪄳璅∪辣嚗峕隡睃蝥扳蝝W銝芰𤌍敶𤏪.onnx 嚗?
/// </summary>
private static string FindModelFile(string model, int scale)
{
string baseDir = AppDomain.CurrentDomain.BaseDirectory;
string fileName = $"{model}_x{scale}.onnx";
string[] searchPaths = new[]
{
Path.Combine(baseDir, "Models", fileName),
Path.Combine(baseDir, fileName),
Path.Combine(Directory.GetCurrentDirectory(), "Models", fileName),
Path.Combine(Directory.GetCurrentDirectory(), fileName),
};
foreach (var path in searchPaths)
{
if (File.Exists(path))
{
_logger.Debug("Found model file: {Path}", path);
return path;
}
}
_logger.Warning("Model file not found: {Model}_x{Scale}.onnx", model, scale);
return string.Empty;
}
}