跳至主要内容

T15_0929

1.LAB 調整 + CLAHE

圖片轉到 LAB 色彩空間(L=亮度,a/b=顏色)。

對 L 通道用 CLAHE (Contrast Limited Adaptive Histogram Equalization):

將圖片分成小格子 (tileGrid=8)。

在每個格子內做直方圖均衡化。

加上 clipLimit 限制,避免過度增強噪點。

2.Gray-World 白平衡(線性域)

3.膚色保護 (Skin Mask) 設定膚色範圍(Cb ∈ [85,135], Cr ∈ [135,180])。

產生 mask,膚色區域值=255,其它=0。

對 mask 做形態學開運算 + GaussianBlur → 平滑邊界。

在混合時:膚色區域用「原圖」權重高,白平衡圖權重低(保護膚色不被漂白/過粉)。 👉 作用:臉部顏色更自然。

4.computeStats (影像統計)

對比前後影像,計算以下指標:

meanY:灰階亮度均值 → 亮度。

stdY:亮度標準差 → 對比。

meanSat:平均飽和度 → 飽和度。

rbDiff:R-B 均值差 → 色溫 / 藍色色調。

直方圖分位數 (p1, p99) → 黑點 / 白點。

dynamicRange = p99 - p1 → HDR 效果。

// NOTE(review): this function is redefined later in this file; in JavaScript
// the later declaration wins, so this copy is dead code until the duplicate
// is resolved. The leak fix below keeps whichever copy survives safe.
async function autoAdjustPro_fromCanvas(canvas) {
  await ensureOpenCVReady();
  const ctx = canvas.getContext("2d", { willReadFrequently: true });
  const srcData = ctx.getImageData(0, 0, canvas.width, canvas.height);

  let srcBGR, claheBGR, wbBGR;
  try {
    // 1. Convert the canvas pixels into an OpenCV Mat (BGR).
    srcBGR = matFromImageData(srcData);

    // 2. Baseline statistics of the untouched frame.
    const stats0 = computeStats(srcBGR);

    // 3. CLAHE on the L channel of Lab — local contrast enhancement.
    claheBGR = applyCLAHE_L_on_Lab(srcBGR, {
      clipLimit: 2.8, // contrast-limiting threshold (suppresses noise boost)
      tileGrid: 8     // 8x8 tile grid
    });

    // 4. Gray-world white balance with skin-tone protection.
    wbBGR = grayWorldWB_withSkin(claheBGR, {
      gainClamp: [0.75, 1.25], // per-channel gain limits
      skinKeep: 0.6            // keep 60% of the original in skin regions
    });

    // 5. Statistics after processing.
    const stats1 = computeStats(wbBGR);

    // 6. Derive slider suggestions from the before/after delta.
    const suggestion = generateAISuggestion(stats0, stats1);

    // 7. Write the processed pixels back and capture a JPEG preview.
    const outData = imageDataFromMatBGR(wbBGR);
    ctx.putImageData(outData, 0, 0);
    const previewDataURL = canvas.toDataURL("image/jpeg", 0.9);

    return { suggestion, previewDataURL };
  } finally {
    // BUGFIX: the Mats were originally deleted only on the success path,
    // leaking WASM heap memory whenever any step above threw.
    srcBGR?.delete();
    claheBGR?.delete();
    wbBGR?.delete();
  }
}
/**
 * Robust variant of the AI-Pro auto-adjust pipeline: validates inputs,
 * logs progress, and guarantees Mat cleanup via try/finally.
 *
 * @param {HTMLCanvasElement} canvas - canvas whose pixels are processed in place.
 * @returns {Promise<{suggestion: Object, previewDataURL: string}>}
 * @throws {Error} wrapped "AI Pro 處理失敗" error; original kept in `cause`.
 */
async function autoAdjustPro_fromCanvas(canvas) {
  let srcBGR, claheBGR, wbBGR;

  try {
    await ensureOpenCVReady();

    // Validate the canvas before touching its pixels.
    if (!canvas || canvas.width === 0 || canvas.height === 0) {
      throw new Error("Invalid canvas dimensions");
    }

    const ctx = canvas.getContext("2d", { willReadFrequently: true });
    // getContext can return null (e.g. the canvas is already bound to
    // another context type) — fail loudly rather than on the next line.
    if (!ctx) {
      throw new Error("Failed to acquire 2D canvas context");
    }
    const srcData = ctx.getImageData(0, 0, canvas.width, canvas.height);

    // Validate the extracted ImageData.
    if (!srcData || !srcData.data || srcData.data.length === 0) {
      throw new Error("Failed to extract image data from canvas");
    }

    console.log(`Processing image: ${canvas.width}×${canvas.height}, data length: ${srcData.data.length}`);

    srcBGR = matFromImageData(srcData);
    if (!isValidMat(srcBGR)) {
      throw new Error("Failed to create valid Mat from ImageData");
    }

    // Processing pipeline: stats → CLAHE → skin-protected white balance → stats.
    const stats0 = computeStats(srcBGR);
    claheBGR = applyCLAHE_L_on_Lab(srcBGR, { clipLimit: 2.8, tileGrid: 8 });
    wbBGR = grayWorldWB_withSkin(claheBGR, { gainClamp: [0.75, 1.25], skinKeep: 0.6 });
    const stats1 = computeStats(wbBGR);

    // Derive slider suggestions from the before/after statistics.
    const suggestion = generateAdvancedSuggestion(stats0, stats1, canvas.width, canvas.height);

    // Write the result back and capture a JPEG preview.
    const outData = imageDataFromMatBGR(wbBGR);
    ctx.putImageData(outData, 0, 0);
    const previewDataURL = canvas.toDataURL("image/jpeg", 0.9);

    return { suggestion, previewDataURL };

  } catch (error) {
    console.error("AI Pro processing failed:", error);
    // BUGFIX: keep the original error (stack + attached data) via `cause`
    // instead of flattening it into a plain message string.
    throw new Error(`AI Pro 處理失敗: ${error.message}`, { cause: error });
  } finally {
    // Guaranteed WASM memory cleanup, success or failure.
    if (srcBGR && !srcBGR.isDeleted()) srcBGR.delete();
    if (claheBGR && !claheBGR.isDeleted()) claheBGR.delete();
    if (wbBGR && !wbBGR.isDeleted()) wbBGR.delete();
  }
}
/**
 * Turn before/after image statistics into editor slider suggestions.
 *
 * @param {Object} stats0 - stats of the original image: meanY, stdY (0–255
 *                          scale), meanSat, rbDiff, p1, p99, dynamicRange.
 * @param {Object} stats1 - stats of the processed image (same shape).
 * @param {number} width  - image width in pixels.
 * @param {number} height - image height in pixels.
 * @returns {Object} map of Chinese slider names to integers in [-100, 100].
 */
function generateAdvancedSuggestion(stats0, stats1, width, height) {
  const clamp100 = (v) => Math.max(-100, Math.min(100, Math.round(v)));

  // Size factor: full-HD and larger images tolerate stronger processing.
  const sizeFactor = Math.min(1.0, (width * height) / (1920 * 1080));

  // Before/after deltas.
  const dMean = stats1.meanY - stats0.meanY;
  const dStd = stats1.stdY - stats0.stdY;
  const dSat = stats1.meanSat - stats0.meanSat;
  const dRB = stats1.rbDiff - stats0.rbDiff;
  const dP1 = stats1.p1 - stats0.p1;
  const dP99 = stats1.p99 - stats0.p99;
  const dDR = stats1.dynamicRange - stats0.dynamicRange;

  // Map each delta onto a [-100, 100] slider value. The divisors/multipliers
  // are hand-tuned gains, not physical constants.
  const brightness = clamp100(dMean / 1.6);
  const contrast = clamp100((dStd / 55) * 60 * sizeFactor);
  const saturation = clamp100(dSat * 450);
  const temperature = clamp100((-dRB) * 90);
  const blueTone = clamp100((-dRB) * 30);

  // Dynamic-range adjustments (black point / white point / HDR).
  const blackPoint = clamp100((-dP1) * 4);
  const whitePoint = clamp100((dP99) * -4);
  const hdr = clamp100(dDR * 0.35);

  // Vividness: driven by saturation gain plus dynamic-range gain.
  const vividness = clamp100(dSat * 220 + dDR * 0.05);

  // Adaptive sharpening: busier images (higher std-dev) get more.
  const complexity = stats1.stdY / 255; // std-dev as a complexity proxy
  const baseSharpness = 8;
  const sharpness = clamp100(baseSharpness * (0.5 + complexity * 0.5));

  // Scene detection.
  // BUGFIX: meanY/stdY are on a 0-255 scale (see `complexity` above and the
  // dMean scaling), but the original compared raw meanY against 0.3/0.7 —
  // so "low key" was never detected and "high key" almost always was.
  // Normalize to [0, 1] before comparing.
  const meanYNorm = stats1.meanY / 255;
  const isLowKey = meanYNorm < 0.3;   // dark scene
  const isHighKey = meanYNorm > 0.7;  // bright scene
  const isLowContrast = stats1.stdY < 30; // flat scene (logging only)

  const adjustedSuggestion = {
    藍色色調: blueTone,
    色溫: temperature,
    飽和度: saturation,
    色調: 0,
    鮮明度: vividness,
    亮度: brightness,
    對比: contrast,
    白點: whitePoint,
    HDR效果: hdr,
    陰影: isLowKey ? clamp100(brightness * 0.3) : 0, // lift shadows in dark scenes
    黑點: blackPoint,
    銳利化: sharpness,
    降噪: isLowKey ? 5 : 0, // dark scenes need denoising
    曝影: isHighKey ? clamp100(-brightness * 0.2) : 0, // tame highlights in bright scenes
  };

  // Diagnostic log of the scene analysis.
  console.log("AI Pro 場景分析:", {
    場景類型: isLowKey ? "暗調" : isHighKey ? "亮調" : "中調",
    對比度: isLowContrast ? "低" : "正常",
    圖像複雜度: (complexity * 100).toFixed(1) + "%",
    處理強度: sizeFactor.toFixed(2)
  });

  return adjustedSuggestion;
}
/**
 * Build a soft skin mask (uint8, 0/255 with blurred edges) from a BGR image.
 *
 * Two YCrCb chroma ranges are OR-ed together, constrained by luma, cleaned
 * with morphology, and feathered with a Gaussian blur so later blending is
 * seamless. The caller owns (and must delete) the returned Mat.
 *
 * @param {cv.Mat} matBGR - source image in BGR order.
 * @returns {cv.Mat} single-channel mask, same size as the input.
 */
function skinMask(matBGR) {
  const ycrcb = new cv.Mat();
  cv.cvtColor(matBGR, ycrcb, cv.COLOR_BGR2YCrCb);

  const channels = new cv.MatVector();
  cv.split(ycrcb, channels);
  const Y = channels.get(0);
  const Cr = channels.get(1);
  const Cb = channels.get(2);

  // Helper: constant-valued Mat matching a reference channel. OpenCV.js
  // inRange takes Mat bounds rather than scalars; every Mat made here is
  // tracked in `scratch` for bulk cleanup.
  const scratch = [];
  const constMat = (ref, value) => {
    const m = new cv.Mat(ref.rows, ref.cols, ref.type(), new cv.Scalar(value));
    scratch.push(m);
    return m;
  };

  const mask = new cv.Mat();
  const tmpA = new cv.Mat();
  const tmpB = new cv.Mat();
  const tmpC = new cv.Mat();

  // Layer 1: classic skin chroma range.
  cv.inRange(Cb, constMat(Cb, 77), constMat(Cb, 127), tmpA);
  cv.inRange(Cr, constMat(Cr, 133), constMat(Cr, 173), tmpB);
  cv.bitwise_and(tmpA, tmpB, mask);

  // Layer 2: widened chroma range (Asian skin tones), OR-ed into the mask.
  cv.inRange(Cb, constMat(Cb, 85), constMat(Cb, 135), tmpA);
  cv.inRange(Cr, constMat(Cr, 135), constMat(Cr, 180), tmpB);
  cv.bitwise_and(tmpA, tmpB, tmpC);
  cv.bitwise_or(mask, tmpC, mask);

  // Luma constraint: drop regions that are too dark or too bright.
  cv.inRange(Y, constMat(Y, 80), constMat(Y, 230), tmpA);
  cv.bitwise_and(mask, tmpA, mask);

  // Morphology: opening removes speckles, closing fills small holes.
  const kernelOpen = cv.getStructuringElement(cv.MORPH_ELLIPSE, new cv.Size(3, 3));
  const kernelClose = cv.getStructuringElement(cv.MORPH_ELLIPSE, new cv.Size(5, 5));
  cv.morphologyEx(mask, mask, cv.MORPH_OPEN, kernelOpen);
  cv.morphologyEx(mask, mask, cv.MORPH_CLOSE, kernelClose);

  // Feather the boundary for smooth blending downstream.
  cv.GaussianBlur(mask, mask, new cv.Size(7, 7), 0);

  // Release every intermediate; only `mask` survives for the caller.
  ycrcb.delete(); channels.delete(); Y.delete(); Cr.delete(); Cb.delete();
  tmpA.delete(); tmpB.delete(); tmpC.delete();
  kernelOpen.delete(); kernelClose.delete();
  for (const m of scratch) m.delete();

  return mask;
}