Browse Source

还是有色块的中间版本

ManualUV
hesuicong 2 weeks ago
parent
commit
0e1b02cda5
  1. 799
      libs/MVS/SceneTexture.cpp

799
libs/MVS/SceneTexture.cpp

@ -111,7 +111,13 @@ struct TRWSInference {
typedef MRFEnergy<TypePotts>::Options MRFOptions; typedef MRFEnergy<TypePotts>::Options MRFOptions;
CAutoPtr<MRFEnergyType> mrf; CAutoPtr<MRFEnergyType> mrf;
CAutoPtrArr<MRFEnergyType::NodeId> nodes; CAutoPtrArr<MRFEnerg
void MeshTexture::ApplyColorConsistencyOptimization(
Image8U3& texture,
const std::vector<std::vector<PixelSample>>& pixelSamples,
int textureSize,
PyType::NodeId> nodes;
inline TRWSInference() {} inline TRWSInference() {}
void Init(NodeID nNodes, LabelID nLabels) { void Init(NodeID nNodes, LabelID nLabels) {
@ -186,6 +192,14 @@ struct PatchQualityInfo {
std::vector<float> faceQualities; std::vector<float> faceQualities;
}; };
// PixelSample is defined outside the function (at file scope):
// one color sample contributed to a texture pixel by a single source view.
struct PixelSample {
	cv::Vec3f color; // sampled color (BGR order, matching OpenCV)
	float weight; // blending weight of this sample
	IIndex viewIdx; // index of the source view the sample came from
};
std::vector<PatchQualityInfo> patchQualityInfos; std::vector<PatchQualityInfo> patchQualityInfos;
struct MeshTexture { struct MeshTexture {
@ -593,6 +607,12 @@ public:
unsigned nTextureSizeMultiple, unsigned nTextureSizeMultiple,
Pixel8U colEmpty, Pixel8U colEmpty,
float fSharpnessWeight); float fSharpnessWeight);
void FillTextureGaps(Image8U3& texture, Pixel8U colEmpty);
void ApplyColorConsistencyOptimization(
Image8U3& texture,
const std::vector<std::vector<PixelSample>>& pixelSamples,
int textureSize,
Pixel8U colEmpty);
void ApplyGlobalColorCorrection(Image8U3& texture, Pixel8U colEmpty, float strength); void ApplyGlobalColorCorrection(Image8U3& texture, Pixel8U colEmpty, float strength);
void ApplySoftSharpening(Image8U3& texture, float strength, Pixel8U colEmpty); void ApplySoftSharpening(Image8U3& texture, float strength, Pixel8U colEmpty);
Mesh::Image8U3Arr GenerateTextureAtlasFromUV( Mesh::Image8U3Arr GenerateTextureAtlasFromUV(
@ -13703,6 +13723,129 @@ bool MeshTexture::TextureWithExistingUV(
return false; return false;
} }
// Fill empty (background-colored) regions of the texture atlas by inpainting
// from the surrounding valid pixels, so unsampled gaps do not show up as
// solid color blocks.
//   texture  : atlas to fill in place (BGR byte image)
//   colEmpty : background color marking unsampled pixels
void MeshTexture::FillTextureGaps(Image8U3& texture, Pixel8U colEmpty) {
	if (texture.empty()) return;
	cv::Mat textureMat = (cv::Mat&)texture;
	// Build the inpainting mask: cv::inpaint expects NON-ZERO mask pixels to
	// mark the region to be repaired, i.e. the EMPTY pixels. (The previous
	// version had this inverted — it marked the valid pixels instead, so the
	// gaps were never actually inpainted.)
	cv::Mat mask = cv::Mat::zeros(texture.rows, texture.cols, CV_8UC1);
	for (int y = 0; y < texture.rows; ++y) {
		for (int x = 0; x < texture.cols; ++x) {
			if (texture(y, x) == colEmpty) {
				mask.at<uchar>(y, x) = 255;
			}
		}
	}
	// Navier-Stokes based inpainting with a 3px neighborhood radius.
	cv::Mat inpainted;
	cv::inpaint(textureMat, mask, inpainted, 3, cv::INPAINT_NS);
	// Copy the inpainted colors back into the empty pixels only; valid
	// pixels keep their original colors.
	for (int y = 0; y < texture.rows; ++y) {
		for (int x = 0; x < texture.cols; ++x) {
			if (texture(y, x) == colEmpty) {
				cv::Vec3b color = inpainted.at<cv::Vec3b>(y, x);
				Pixel8U pixel;
				pixel.b = color[0];
				pixel.g = color[1];
				pixel.r = color[2];
				texture(y, x) = pixel;
			}
		}
	}
}
// Reduce local color outliers ("color blocks") by nudging each sampled pixel
// toward the mean color of its valid 3x3 neighborhood, but only when the
// pixel deviates noticeably from that mean.
//   texture      : texture atlas adjusted in place (BGR byte image)
//   pixelSamples : per-pixel sample lists, indexed as y * textureSize + x;
//                  pixels with no samples are left untouched
//   textureSize  : row stride used to index pixelSamples (expected to match
//                  texture.cols — guarded below in case it does not)
//   colEmpty     : background color marking unsampled pixels
// Compared with the previous version: neighborhood reads now come from an
// unmodified snapshot (results no longer depend on scan order), the sample
// index is bounds-checked, background pixels are skipped, and the unused
// scratch buffers / variance computation were removed.
void MeshTexture::ApplyColorConsistencyOptimization(
	Image8U3& texture,
	const std::vector<std::vector<PixelSample>>& pixelSamples,
	int textureSize,
	Pixel8U colEmpty)
{
	if (pixelSamples.empty()) return;

	const int kernelSize = 3;
	const int halfKernel = kernelSize / 2;
	const size_t numPixels = pixelSamples.size();

	// Snapshot the input so neighborhood reads never see already-adjusted
	// pixels from earlier in the scan.
	std::vector<Pixel8U> snapshot((size_t)texture.rows * texture.cols);
	for (int y = 0; y < texture.rows; ++y)
		for (int x = 0; x < texture.cols; ++x)
			snapshot[(size_t)y * texture.cols + x] = texture(y, x);

	for (int y = halfKernel; y < texture.rows - halfKernel; ++y) {
		for (int x = halfKernel; x < texture.cols - halfKernel; ++x) {
			const size_t pixelIdx = (size_t)y * textureSize + x;
			if (pixelIdx >= numPixels) continue; // stride/size mismatch guard
			if (pixelSamples[pixelIdx].empty()) continue;

			// Background pixels are never adjusted.
			const Pixel8U currentPixel = snapshot[(size_t)y * texture.cols + x];
			if (currentPixel == colEmpty) continue;

			// Collect the valid (sampled, non-background) neighbor colors (BGR).
			std::vector<cv::Vec3f> neighborColors;
			for (int dy = -halfKernel; dy <= halfKernel; ++dy) {
				for (int dx = -halfKernel; dx <= halfKernel; ++dx) {
					const int nx = x + dx;
					const int ny = y + dy;
					const size_t neighborIdx = (size_t)ny * textureSize + nx;
					if (neighborIdx >= numPixels) continue;
					if (pixelSamples[neighborIdx].empty()) continue;
					const Pixel8U pixel = snapshot[(size_t)ny * texture.cols + nx];
					if (pixel != colEmpty)
						neighborColors.push_back(cv::Vec3f(pixel.b, pixel.g, pixel.r));
				}
			}
			if (neighborColors.size() < 3) continue; // need enough neighborhood context

			// Mean neighborhood color.
			cv::Vec3f mean(0, 0, 0);
			for (const auto& color : neighborColors)
				mean += color;
			mean /= (float)neighborColors.size();

			// Pull the pixel toward the mean only when it deviates strongly.
			const cv::Vec3f currentColor(currentPixel.b, currentPixel.g, currentPixel.r);
			const cv::Vec3f diff = currentColor - mean;
			const float diffMag = std::sqrt(diff.dot(diff));
			cv::Vec3f adjustedColor = currentColor;
			const float alpha = 0.3f; // blend strength toward the neighborhood mean
			if (diffMag > 10.0f) // deviation threshold (8-bit intensity units)
				adjustedColor = currentColor * (1.0f - alpha) + mean * alpha;

			// Write back in RGB member order (accumulator is BGR).
			Pixel8U newPixel;
			newPixel.r = cv::saturate_cast<uchar>(adjustedColor[2]);
			newPixel.g = cv::saturate_cast<uchar>(adjustedColor[1]);
			newPixel.b = cv::saturate_cast<uchar>(adjustedColor[0]);
			texture(y, x) = newPixel;
		}
	}
}
Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge( Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
const LabelArr& faceLabels, const LabelArr& faceLabels,
const IIndexArr& views, const IIndexArr& views,
@ -13713,7 +13856,10 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
Pixel8U colEmpty, Pixel8U colEmpty,
float fSharpnessWeight) float fSharpnessWeight)
{ {
DEBUG_EXTRA("GenerateTextureAtlasWith3DBridge - 使用3D几何坐标作为桥梁,修复白平衡问题"); DEBUG_EXTRA("GenerateTextureAtlasWith3DBridge - 使用3D几何坐标和多视图融合");
// 定义INVALID_INDEX常量
const IIndex INVALID_INDEX = (IIndex)-1;
// 1. 分析外部UV布局 // 1. 分析外部UV布局
AABB2f uvBounds(true); AABB2f uvBounds(true);
@ -13746,9 +13892,6 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
// 使用统一的背景色设置 // 使用统一的背景色设置
DEBUG_EXTRA("设置背景色: RGB(%d,%d,%d)", colEmpty.r, colEmpty.g, colEmpty.b); DEBUG_EXTRA("设置背景色: RGB(%d,%d,%d)", colEmpty.r, colEmpty.g, colEmpty.b);
// 注意:Image8U3的setTo使用cv::Scalar,是BGR顺序
// 但colEmpty是RGB顺序,需要转换
cv::Scalar cvEmpty(colEmpty.b, colEmpty.g, colEmpty.r); cv::Scalar cvEmpty(colEmpty.b, colEmpty.g, colEmpty.r);
textureAtlas.setTo(cvEmpty); textureAtlas.setTo(cvEmpty);
@ -13757,120 +13900,265 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
uvBounds.ptMin.x(), uvBounds.ptMin.y(), uvBounds.ptMin.x(), uvBounds.ptMin.y(),
uvBounds.ptMax.x(), uvBounds.ptMax.y()); uvBounds.ptMax.x(), uvBounds.ptMax.y());
// 2. 为每个视图创建颜色统计 std::vector<std::vector<PixelSample>> pixelSamples(textureSize * textureSize);
std::vector<cv::Vec3d> viewColorSums(images.size(), cv::Vec3d(0, 0, 0));
std::vector<int> viewPixelCounts(images.size(), 0);
std::vector<std::vector<cv::Vec3b>> viewColorSamples(images.size());
// 3. 第一次遍历:收集每个视图的颜色统计 // 3. 第一次遍历:收集所有视图的采样
DEBUG_EXTRA("第一次遍历:收集视图颜色统计"); DEBUG_EXTRA("第一次遍历:收集多视图采样");
int totalSamples = 0;
#ifdef _USE_OPENMP #ifdef _USE_OPENMP
#pragma omp parallel for schedule(dynamic) #pragma omp parallel for schedule(dynamic) reduction(+:totalSamples)
#endif #endif
for (int_t idxFace = 0; idxFace < (int_t)scene.mesh.faces.size(); ++idxFace) { for (int_t idxFace = 0; idxFace < (int_t)scene.mesh.faces.size(); ++idxFace) {
const FIndex faceID = (FIndex)idxFace; const FIndex faceID = (FIndex)idxFace;
const Label label = faceLabels[faceID]; const Label label = faceLabels[faceID];
if (label == 0) continue; if (label == 0) continue;
const IIndex idxView = label - 1; const IIndex primaryView = label - 1;
if (idxView >= images.size()) continue; if (primaryView >= images.size()) continue;
const Image& sourceImage = images[idxView];
const TexCoord* meshUVs = &scene.mesh.faceTexcoords[faceID * 3]; const TexCoord* meshUVs = &scene.mesh.faceTexcoords[faceID * 3];
const Face& face = scene.mesh.faces[faceID]; const Face& face = scene.mesh.faces[faceID];
const Image& sourceImage = images[primaryView];
// 在面的中心采样几个点 // 计算面法线
for (int i = 0; i < 3; ++i) { // 采样三个顶点 Vertex v0 = vertices[face[1]] - vertices[face[0]];
const Vertex& worldPoint = vertices[face[i]]; Vertex v1 = vertices[face[2]] - vertices[face[0]];
Vertex crossProduct = v0.cross(v1);
// 检查3D点是否在相机前方 double normVal = cv::norm(crossProduct);
if (!sourceImage.camera.IsInFront(worldPoint)) { Vertex normal(0, 0, 1); // 默认法向量
continue; if (normVal > 0) {
normal = crossProduct / (float)normVal; // 归一化
}
// 计算面的UV边界
AABB2f faceBounds(true);
for (int i = 0; i < 3; ++i) {
faceBounds.InsertFull(meshUVs[i]);
}
int startX = (int)(faceBounds.ptMin.x() * textureSize);
int startY = (int)(faceBounds.ptMin.y() * textureSize);
int endX = (int)(faceBounds.ptMax.x() * textureSize);
int endY = (int)(faceBounds.ptMax.y() * textureSize);
startX = std::max(0, std::min(startX, textureSize - 1));
startY = std::max(0, std::min(startY, textureSize - 1));
endX = std::max(0, std::min(endX, textureSize - 1));
endY = std::max(0, std::min(endY, textureSize - 1));
if (startX >= endX || startY >= endY) continue;
// 采样密度
const int sampleStep = 1; // 每个像素都采样
int faceSamples = 0;
for (int y = startY; y <= endY; y += sampleStep) {
for (int x = startX; x <= endX; x += sampleStep) {
const Point2f texCoord((x + 0.5f) / textureSize, (y + 0.5f) / textureSize);
// 计算重心坐标
Point3f barycentric;
if (!PointInTriangle(texCoord, meshUVs[0], meshUVs[1], meshUVs[2], barycentric)) {
continue;
}
// 计算3D点
const Vertex worldPoint =
vertices[face[0]] * barycentric.x +
vertices[face[1]] * barycentric.y +
vertices[face[2]] * barycentric.z;
// 检查3D点是否在相机前方
if (!sourceImage.camera.IsInFront(worldPoint)) {
continue;
}
// 投影到图像
Point2f imgPoint = sourceImage.camera.ProjectPointP(worldPoint);
// 确保在图像范围内
if (imgPoint.x < 0 || imgPoint.x >= sourceImage.image.cols ||
imgPoint.y < 0 || imgPoint.y >= sourceImage.image.rows) {
continue;
}
// 采样图像
Pixel8U sampledColor = SampleImageBilinear(sourceImage.image, imgPoint);
// 计算权重
float weight = 1.0f; // 默认权重
// 计算第一个顶点在图像上的投影(用于分辨率计算)
Point2f proj0 = sourceImage.camera.ProjectPointP(vertices[face[0]]);
// 计算到相机中心的距离
Vertex cameraPos(sourceImage.camera.C.x, sourceImage.camera.C.y, sourceImage.camera.C.z);
float dist = cv::norm(cameraPos - worldPoint);
// 计算视角与法线夹角
Vertex viewDir = cameraPos - worldPoint;
float viewDirNorm = cv::norm(viewDir);
if (viewDirNorm > 0) {
viewDir = viewDir / viewDirNorm;
float viewAngle = std::abs(viewDir.dot(normal));
// 计算分辨率
float resolution = 1.0f;
double projDist = cv::norm(imgPoint - proj0);
if (projDist > 1e-6) {
resolution = 1.0f / (float)projDist;
}
// 综合权重 = 视角质量 * 分辨率
weight = viewAngle * resolution;
// 添加距离衰减
float distFactor = std::exp(-dist * 0.001f);
weight *= distFactor;
// 限制权重范围
weight = std::max(0.5f, std::min(2.0f, weight));
}
// 存储采样
int pixelIdx = y * textureSize + x;
PixelSample sample;
sample.color = cv::Vec3f(sampledColor.b, sampledColor.g, sampledColor.r); // 转换为BGR
sample.weight = weight;
sample.viewIdx = primaryView;
#ifdef _USE_OPENMP
#pragma omp critical
#endif
{
pixelSamples[pixelIdx].push_back(sample);
}
faceSamples++;
} }
}
totalSamples += faceSamples;
}
DEBUG_EXTRA("采样完成: 总采样点 %d", totalSamples);
// 4. 第二次遍历:从其他视图补充采样
DEBUG_EXTRA("第二次遍历:从其他视图补充采样");
// 找出每个面的其他可见视图
std::vector<std::vector<IIndex>> faceVisibleViews(scene.mesh.faces.size());
#ifdef _USE_OPENMP
#pragma omp parallel for schedule(dynamic)
#endif
for (int_t idxFace = 0; idxFace < (int_t)scene.mesh.faces.size(); ++idxFace) {
const FIndex faceID = (FIndex)idxFace;
const Face& face = scene.mesh.faces[faceID];
// 计算面法线
Vertex v0 = vertices[face[1]] - vertices[face[0]];
Vertex v1 = vertices[face[2]] - vertices[face[0]];
Vertex crossProduct = v0.cross(v1);
double normVal = cv::norm(crossProduct);
Vertex normal(0, 0, 1); // 默认法向量
if (normVal > 0) {
normal = crossProduct / (float)normVal; // 归一化
}
// 计算面中心
Vertex faceCenter(0, 0, 0);
for (int v = 0; v < 3; ++v) {
faceCenter += vertices[face[v]];
}
faceCenter /= 3.0f;
// 检查所有视图
std::vector<std::pair<float, IIndex>> viewScores;
for (IIndex i = 0; i < images.size(); ++i) {
const Image& img = images[i];
Point2f imgPoint = sourceImage.camera.ProjectPointP(worldPoint); // 检查可见性
if (!img.camera.IsInFront(faceCenter)) continue;
// 确保在图像范围内 // 计算投影点
if (imgPoint.x < 0 || imgPoint.x >= sourceImage.image.cols || Point2f imgPoint = img.camera.ProjectPointP(faceCenter);
imgPoint.y < 0 || imgPoint.y >= sourceImage.image.rows) { if (imgPoint.x < 0 || imgPoint.x >= img.image.cols ||
imgPoint.y < 0 || imgPoint.y >= img.image.rows) {
continue; continue;
} }
// 采样图像 // 计算视角方向
Pixel8U sampledColor = SampleImageBilinear(sourceImage.image, imgPoint); Vertex cameraPos(img.camera.C.x, img.camera.C.y, img.camera.C.z);
Vertex viewDirVec = cameraPos - faceCenter;
float viewDirNorm = cv::norm(viewDirVec);
// 存储采样颜色 if (viewDirNorm <= 1e-6) continue; // 避免除以零
#ifdef _USE_OPENMP
#pragma omp critical Vertex viewDir = viewDirVec / viewDirNorm;
#endif
{ // 计算视角质量(法线与视角方向的夹角)
viewColorSamples[idxView].push_back(cv::Vec3b(sampledColor.b, sampledColor.g, sampledColor.r)); float viewAngle = std::abs(viewDir.dot(normal));
// 计算分辨率 - 计算两个顶点投影点之间的欧氏距离
Point2f proj0 = img.camera.ProjectPointP(vertices[face[0]]);
double projDist = cv::norm(imgPoint - proj0);
// 如果两个投影点太近,则跳过
if (projDist < 1e-6) continue;
float resolution = 1.0f / (float)projDist;
// 综合得分
float score = viewAngle * resolution;
if (score > 0.1f) { // 阈值
viewScores.push_back({score, i});
} }
} }
}
// 4. 计算每个视图的颜色调整因子
std::vector<cv::Vec3d> viewColorAdjustments(images.size(), cv::Vec3d(1.0, 1.0, 1.0));
cv::Vec3d globalMean(0, 0, 0);
int globalSampleCount = 0;
for (size_t i = 0; i < images.size(); ++i) {
if (viewColorSamples[i].empty()) continue;
cv::Vec3d sum(0, 0, 0); // 按得分排序
for (const auto& pixel : viewColorSamples[i]) { std::sort(viewScores.begin(), viewScores.end(),
sum[0] += pixel[0]; // B [](const auto& a, const auto& b) { return a.first > b.first; });
sum[1] += pixel[1]; // G
sum[2] += pixel[2]; // R
}
cv::Vec3d mean = sum / (double)viewColorSamples[i].size();
globalMean += sum;
globalSampleCount += viewColorSamples[i].size();
DEBUG_EXTRA("视图 %zu: 平均颜色 B=%.1f, G=%.1f, R=%.1f, 样本数=%zu", // 选择前4个最佳视图
i, mean[0], mean[1], mean[2], viewColorSamples[i].size()); faceVisibleViews[idxFace].push_back(faceLabels[idxFace] - 1); // 主视图
} for (size_t i = 0; i < std::min(viewScores.size(), (size_t)3); ++i) {
if (viewScores[i].second != faceLabels[idxFace] - 1) {
// 计算全局平均颜色 faceVisibleViews[idxFace].push_back(viewScores[i].second);
if (globalSampleCount > 0) { }
globalMean /= globalSampleCount; }
DEBUG_EXTRA("全局平均颜色: B=%.1f, G=%.1f, R=%.1f",
globalMean[0], globalMean[1], globalMean[2]);
} }
// 5. 采样纹理 // 5. 从其他视图采样
DEBUG_EXTRA("第二次遍历:采样纹理并应用颜色校正"); int additionalSamples = 0;
cv::Mat1f weightAccum(textureSize, textureSize, 0.0f);
cv::Mat3f colorAccum(textureSize, textureSize, cv::Vec3f(0, 0, 0));
int processedFaces = 0;
int sampledPixels = 0;
int failedFaces = 0;
#ifdef _USE_OPENMP #ifdef _USE_OPENMP
#pragma omp parallel for schedule(dynamic) reduction(+:processedFaces, sampledPixels, failedFaces) #pragma omp parallel for schedule(dynamic) reduction(+:additionalSamples)
#endif #endif
for (int_t idxFace = 0; idxFace < (int_t)scene.mesh.faces.size(); ++idxFace) { for (int_t idxFace = 0; idxFace < (int_t)scene.mesh.faces.size(); ++idxFace) {
const FIndex faceID = (FIndex)idxFace; const FIndex faceID = (FIndex)idxFace;
const Label label = faceLabels[faceID]; const Label label = faceLabels[faceID];
if (label == 0) { if (label == 0) continue;
failedFaces++;
continue;
}
const IIndex idxView = label - 1;
if (idxView >= images.size()) {
failedFaces++;
continue;
}
const TexCoord* meshUVs = &scene.mesh.faceTexcoords[faceID * 3]; const TexCoord* meshUVs = &scene.mesh.faceTexcoords[faceID * 3];
const Face& face = scene.mesh.faces[faceID]; const Face& face = scene.mesh.faces[faceID];
const Image& sourceImage = images[idxView];
// 计算面法线
Vertex v0 = vertices[face[1]] - vertices[face[0]];
Vertex v1 = vertices[face[2]] - vertices[face[0]];
Vertex crossProduct = v0.cross(v1);
double normVal = cv::norm(crossProduct);
Vertex normal(0, 0, 1); // 默认法向量
if (normVal > 0) {
normal = crossProduct / (float)normVal; // 归一化
}
// 获取可见视图
const auto& visibleViews = faceVisibleViews[idxFace];
if (visibleViews.size() <= 1) continue; // 只有一个视图
// 计算面的UV边界 // 计算面的UV边界
AABB2f faceBounds(true); AABB2f faceBounds(true);
@ -13888,14 +14176,9 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
endX = std::max(0, std::min(endX, textureSize - 1)); endX = std::max(0, std::min(endX, textureSize - 1));
endY = std::max(0, std::min(endY, textureSize - 1)); endY = std::max(0, std::min(endY, textureSize - 1));
if (startX >= endX || startY >= endY) { if (startX >= endX || startY >= endY) continue;
failedFaces++;
continue;
}
int faceSampledPixels = 0; // 对每个像素,从其他视图采样
// 采样纹理
for (int y = startY; y <= endY; ++y) { for (int y = startY; y <= endY; ++y) {
for (int x = startX; x <= endX; ++x) { for (int x = startX; x <= endX; ++x) {
const Point2f texCoord((x + 0.5f) / textureSize, (y + 0.5f) / textureSize); const Point2f texCoord((x + 0.5f) / textureSize, (y + 0.5f) / textureSize);
@ -13912,155 +14195,271 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
vertices[face[1]] * barycentric.y + vertices[face[1]] * barycentric.y +
vertices[face[2]] * barycentric.z; vertices[face[2]] * barycentric.z;
// 检查3D点是否在相机前方 int pixelIdx = y * textureSize + x;
if (!sourceImage.camera.IsInFront(worldPoint)) {
continue;
}
// 从原始图像采样
Point2f imgPoint = sourceImage.camera.ProjectPointP(worldPoint);
// 确保在图像范围内
if (imgPoint.x < 0 || imgPoint.x >= sourceImage.image.cols ||
imgPoint.y < 0 || imgPoint.y >= sourceImage.image.rows) {
continue;
}
// 采样图像
Pixel8U sampledColor = SampleImageBilinear(sourceImage.image, imgPoint);
// 转换为BGR顺序用于颜色累加 // 从其他视图采样
cv::Vec3f bgrColor( for (size_t i = 1; i < visibleViews.size(); ++i) { // 从第2个视图开始
sampledColor.b, // B IIndex viewIdx = visibleViews[i];
sampledColor.g, // G if (viewIdx >= images.size()) continue;
sampledColor.r // R
); const Image& sourceImage = images[viewIdx];
// 获取颜色调整因子 // 检查可见性
cv::Vec3d adjust(1.0, 1.0, 1.0); if (!sourceImage.camera.IsInFront(worldPoint)) continue;
if (globalSampleCount > 0) {
// 计算颜色调整因子 // 投影
double avgGray = (globalMean[0] + globalMean[1] + globalMean[2]) / 3.0; Point2f imgPoint = sourceImage.camera.ProjectPointP(worldPoint);
if (avgGray > 0) { if (imgPoint.x < 0 || imgPoint.x >= sourceImage.image.cols ||
adjust[0] = globalMean[0] / avgGray; // B调整因子 imgPoint.y < 0 || imgPoint.y >= sourceImage.image.rows) {
adjust[1] = globalMean[1] / avgGray; // G调整因子 continue;
adjust[2] = globalMean[2] / avgGray; // R调整因子 }
// 限制调整幅度 // 采样图像
const double maxAdjust = 1.2; Pixel8U sampledColor = SampleImageBilinear(sourceImage.image, imgPoint);
const double minAdjust = 0.833; // 1/1.2
// 计算权重
float weight = 0.2f; // 默认较低权重
// 计算第一个顶点在图像上的投影
Point2f proj0 = sourceImage.camera.ProjectPointP(vertices[face[0]]);
// 计算投影分辨率
double projDist = cv::norm(imgPoint - proj0);
float resolution = 1.0f;
if (projDist > 1e-6) {
resolution = 1.0f / (float)projDist;
}
// 计算视角与法线夹角
Vertex cameraPos(sourceImage.camera.C.x, sourceImage.camera.C.y, sourceImage.camera.C.z);
Vertex viewDir = cameraPos - worldPoint;
float viewDirNorm = cv::norm(viewDir);
if (viewDirNorm > 0) {
viewDir = viewDir / viewDirNorm;
float viewAngle = std::abs(viewDir.dot(normal));
adjust[0] = std::max(minAdjust, std::min(adjust[0], maxAdjust)); // 综合权重 = 分辨率 * 视角角度
adjust[1] = std::max(minAdjust, std::min(adjust[1], maxAdjust)); weight = resolution * viewAngle;
adjust[2] = std::max(minAdjust, std::min(adjust[2], maxAdjust)); weight = std::max(0.1f, std::min(1.0f, weight * 0.5f)); // 限制权重范围
}
// 存储采样
PixelSample sample;
sample.color = cv::Vec3f(sampledColor.b, sampledColor.g, sampledColor.r);
sample.weight = weight;
sample.viewIdx = viewIdx;
#ifdef _USE_OPENMP
#pragma omp critical
#endif
{
pixelSamples[pixelIdx].push_back(sample);
additionalSamples++;
} }
} }
}
}
}
DEBUG_EXTRA("补充采样完成: 新增采样点 %d", additionalSamples);
// 6. 融合多视图颜色
DEBUG_EXTRA("融合多视图颜色");
cv::Mat3f colorAccum(textureSize, textureSize, cv::Vec3f(0, 0, 0));
cv::Mat1f weightAccum(textureSize, textureSize, 0.0f);
cv::Mat1i sampleCount(textureSize, textureSize, 0);
int fusedPixels = 0;
// 添加统计信息
int mainViewOnly = 0;
int multiViewFused = 0;
int highDiffCount = 0;
for (int y = 0; y < textureSize; ++y) {
for (int x = 0; x < textureSize; ++x) {
int pixelIdx = y * textureSize + x;
auto& samples = pixelSamples[pixelIdx];
if (samples.empty()) continue;
// 分离主视图和辅助视图
std::vector<cv::Vec3f> mainViewColors;
std::vector<float> mainViewWeights;
std::vector<cv::Vec3f> auxiliaryColors;
std::vector<float> auxiliaryWeights;
IIndex primaryView = INVALID_INDEX;
for (const auto& sample : samples) {
if (primaryView == INVALID_INDEX) {
primaryView = sample.viewIdx;
}
// 应用颜色调整 if (sample.viewIdx == primaryView) {
cv::Vec3f adjustedColor = bgrColor; // 主视图
adjustedColor[0] *= adjust[0]; // B通道 mainViewColors.push_back(sample.color);
adjustedColor[1] *= adjust[1]; // G通道 mainViewWeights.push_back(sample.weight);
adjustedColor[2] *= adjust[2]; // R通道 } else {
// 辅助视图
// 限制颜色范围 auxiliaryColors.push_back(sample.color);
adjustedColor[0] = std::max(0.0f, std::min(255.0f, adjustedColor[0])); auxiliaryWeights.push_back(sample.weight * 0.3f); // 辅助视图权重更低
adjustedColor[1] = std::max(0.0f, std::min(255.0f, adjustedColor[1])); }
adjustedColor[2] = std::max(0.0f, std::min(255.0f, adjustedColor[2])); }
if (mainViewColors.empty()) continue;
if (auxiliaryColors.empty()) {
mainViewOnly++;
} else {
multiViewFused++;
}
// 计算主视图的加权平均颜色
cv::Vec3f mainColor(0, 0, 0);
float mainWeightSum = 0.0f;
for (size_t i = 0; i < mainViewColors.size(); ++i) {
mainColor += mainViewColors[i] * mainViewWeights[i];
mainWeightSum += mainViewWeights[i];
}
if (mainWeightSum > 0) {
mainColor /= mainWeightSum;
}
// 如果有辅助视图,进行智能融合
cv::Vec3f finalColor = mainColor;
float finalWeight = 1.0f;
if (!auxiliaryColors.empty()) {
// 计算辅助视图的加权平均颜色
cv::Vec3f auxColor(0, 0, 0);
float auxWeightSum = 0.0f;
// 累加颜色和权重 for (size_t i = 0; i < auxiliaryColors.size(); ++i) {
float weight = 1.0f; auxColor += auxiliaryColors[i] * auxiliaryWeights[i];
float& w = weightAccum(y, x); auxWeightSum += auxiliaryWeights[i];
cv::Vec3f& c = colorAccum(y, x); }
if (auxWeightSum > 0) {
auxColor /= auxWeightSum;
}
#ifdef _USE_OPENMP // 计算颜色差异
#pragma omp atomic cv::Vec3f colorDiff = mainColor - auxColor;
#endif float colorDiffMag = std::sqrt(colorDiff.dot(colorDiff));
w += weight;
#ifdef _USE_OPENMP // 如果颜色差异不大,才融合辅助视图
#pragma omp critical if (colorDiffMag < 30.0f) { // 颜色差异阈值
#endif // 根据颜色差异调整融合权重
{ float alpha = std::exp(-colorDiffMag * 0.1f); // 差异越大,融合越少
c[0] += adjustedColor[0] * weight; finalColor = mainColor * (1.0f - alpha) + auxColor * alpha;
c[1] += adjustedColor[1] * weight; finalWeight = 1.0f;
c[2] += adjustedColor[2] * weight; } else {
// 颜色差异太大,只使用主视图
finalColor = mainColor;
finalWeight = 1.0f;
highDiffCount++;
} }
}
// 应用简单的颜色饱和度增强
float b = finalColor[0];
float g = finalColor[1];
float r = finalColor[2];
float maxVal = std::max(r, std::max(g, b));
float minVal = std::min(r, std::min(g, b));
float delta = maxVal - minVal;
if (delta > 5.0f) { // 避免对灰度像素处理
// 增强饱和度
float saturationBoost = 1.2f; // 20%饱和度增强
float mean = (r + g + b) / 3.0f;
r = mean + (r - mean) * saturationBoost;
g = mean + (g - mean) * saturationBoost;
b = mean + (b - mean) * saturationBoost;
// 限制在有效范围
r = std::max(0.0f, std::min(255.0f, r));
g = std::max(0.0f, std::min(255.0f, g));
b = std::max(0.0f, std::min(255.0f, b));
faceSampledPixels++; finalColor = cv::Vec3f(b, g, r);
} }
// 存储结果
colorAccum(y, x) = finalColor;
weightAccum(y, x) = finalWeight;
sampleCount(y, x) = (int)samples.size();
fusedPixels++;
} }
if (faceSampledPixels > 0) { // 输出进度
processedFaces++; if (y % 100 == 0) {
sampledPixels += faceSampledPixels; float progress = (float)y * 100.0f / textureSize;
} else { DEBUG_EXTRA("颜色融合进度: %.1f%% (处理行 %d/%d)", progress, y, textureSize);
failedFaces++;
} }
} }
DEBUG_EXTRA("纹理采样完成: 成功 %d 个面, 失败 %d 个面, 采样 %d 像素", DEBUG_EXTRA("颜色融合完成: 融合像素 %d", fusedPixels);
processedFaces, failedFaces, sampledPixels); DEBUG_EXTRA("融合统计: 仅主视图=%d, 多视图融合=%d, 高差异=%d",
mainViewOnly, multiViewFused, highDiffCount);
// 6. 应用权重归一化 // 7. 生成最终纹理
DEBUG_EXTRA("应用权重归一化"); DEBUG_EXTRA("生成最终纹理");
for (int y = 0; y < textureSize; ++y) { for (int y = 0; y < textureSize; ++y) {
for (int x = 0; x < textureSize; ++x) { for (int x = 0; x < textureSize; ++x) {
float weight = weightAccum(y, x); float weight = weightAccum(y, x);
if (weight > 0.0f) { if (weight > 0.0f) {
cv::Vec3f avgColor = colorAccum(y, x) / weight; cv::Vec3f bgrColor = colorAccum(y, x);
// 从BGR转回RGB顺序 // 转换为RGB顺序
Pixel8U finalColor; Pixel8U pixel;
finalColor.r = (unsigned char)cv::saturate_cast<uchar>(avgColor[2]); // R pixel.r = (unsigned char)cv::saturate_cast<uchar>(bgrColor[2]); // R
finalColor.g = (unsigned char)cv::saturate_cast<uchar>(avgColor[1]); // G pixel.g = (unsigned char)cv::saturate_cast<uchar>(bgrColor[1]); // G
finalColor.b = (unsigned char)cv::saturate_cast<uchar>(avgColor[0]); // B pixel.b = (unsigned char)cv::saturate_cast<uchar>(bgrColor[0]); // B
textureAtlas(y, x) = finalColor; textureAtlas(y, x) = pixel;
} else { } else {
textureAtlas(y, x) = colEmpty; textureAtlas(y, x) = colEmpty;
} }
} }
} }
// 7. 应用全局颜色校正 // 8. 应用颜色一致性优化
if (processedFaces > 0 && sampledPixels > 0) { if (fusedPixels > 0) {
ApplyGlobalColorCorrection(textureAtlas, colEmpty, 0.8f); DEBUG_EXTRA("应用颜色一致性优化");
ApplyColorConsistencyOptimization(textureAtlas, pixelSamples, textureSize, colEmpty);
} }
// 8. 锐化处理 // 9. 填充空白区域
if (fSharpnessWeight > 0 && sampledPixels > 0) { // FillTextureGaps(textureAtlas, colEmpty);
DEBUG_EXTRA("应用锐化处理");
// 10. 应用轻微的颜色校正
if (fusedPixels > 0) {
ApplyGlobalColorCorrection(textureAtlas, colEmpty, 0.3f);
}
// 11. 锐化处理
if (fSharpnessWeight > 0 && fusedPixels > 0) {
ApplySoftSharpening(textureAtlas, fSharpnessWeight, colEmpty); ApplySoftSharpening(textureAtlas, fSharpnessWeight, colEmpty);
} }
// 9. 最终颜色检查 // 12. 最终统计
if (!textureAtlas.empty()) { int validPixels = 0;
cv::Scalar mean = cv::mean(textureAtlas); for (int y = 0; y < textureSize; ++y) {
DEBUG_EXTRA("最终纹理平均颜色: B=%.1f, G=%.1f, R=%.1f (OpenCV BGR顺序)", for (int x = 0; x < textureSize; ++x) {
mean[0], mean[1], mean[2]); if (textureAtlas(y, x) != colEmpty) {
validPixels++;
double rSum = 0, gSum = 0, bSum = 0;
int count = 0;
for (int y = 0; y < textureAtlas.rows; ++y) {
for (int x = 0; x < textureAtlas.cols; ++x) {
Pixel8U pixel = textureAtlas(y, x);
if (pixel != colEmpty) {
rSum += pixel.r;
gSum += pixel.g;
bSum += pixel.b;
count++;
}
} }
} }
if (count > 0) {
DEBUG_EXTRA("有效像素RGB平均值: R=%.1f, G=%.1f, B=%.1f",
rSum / count, gSum / count, bSum / count);
}
} }
DEBUG_EXTRA("纹理图集生成完成: 有效像素 %d / %d (%.2f%%)",
validPixels, textureSize * textureSize,
(float)validPixels * 100 / (textureSize * textureSize));
return textures; return textures;
} }

Loading…
Cancel
Save