Browse Source

编译通过,但是空白

ManualUV
hesuicong 6 days ago
parent
commit
4a9013f12f
  1. 924
      libs/MVS/SceneTexture.cpp

924
libs/MVS/SceneTexture.cpp

@ -380,7 +380,10 @@ struct MeshTexture {
struct FaceData2 { struct FaceData2 {
std::vector<TextureSample> samples; std::vector<TextureSample> samples;
cv::Vec3f averageColor; cv::Vec3f averageColor;
IIndex bestView;
float confidence; float confidence;
FaceData2() : bestView(NO_ID), confidence(0.0f), averageColor(0,0,0) {}
}; };
// used to optimize texture patches // used to optimize texture patches
@ -676,6 +679,19 @@ public:
const Pixel8U& colEmpty); const Pixel8U& colEmpty);
void CheckColorChannels(const Image8U3& texture, const std::string& name); void CheckColorChannels(const Image8U3& texture, const std::string& name);
void SelectBestViewForFaces(const IIndexArr& views,
std::vector<IIndex>& faceBestViews,
std::vector<float>& faceConfidences);
void ApplyGraphCutViewSelection(
const std::vector<std::vector<float>>& viewScores,
std::vector<IIndex>& faceBestViews,
std::vector<float>& faceConfidences);
void BuildFaceAdjacency(std::vector<std::vector<int>>& adjacency);
void SampleFaceFromSingleView(FIndex idxFace,
IIndex bestView,
FaceData2& faceData,
int textureSize);
Mesh::Image8U3Arr GenerateTextureAtlasWith3DBridge( Mesh::Image8U3Arr GenerateTextureAtlasWith3DBridge(
const LabelArr& faceLabels, const LabelArr& faceLabels,
const IIndexArr& views, const IIndexArr& views,
@ -685,6 +701,19 @@ public:
unsigned nTextureSizeMultiple, unsigned nTextureSizeMultiple,
Pixel8U colEmpty, Pixel8U colEmpty,
float fSharpnessWeight); float fSharpnessWeight);
void ApplyNeighborhoodConsistency(
std::vector<FaceData2>& faceSamplesData,
const std::vector<IIndex>& faceBestViews,
int textureSize);
void GenerateTextureFromSingleViewSamples(
const std::vector<FaceData2>& faceSamplesData,
Mesh::Image8U3Arr& textures,
int textureSize,
Pixel8U colEmpty);
void ApplyAntialiasing(Image8U3& texture, const Pixel8U& colEmpty);
void ApplyConservativeSmoothing(Image8U3& texture,
const Pixel8U& colEmpty);
float ComputeViewWeight(const Image& image, const Vertex& faceCenter, const Vertex& normal); float ComputeViewWeight(const Image& image, const Vertex& faceCenter, const Vertex& normal);
float ComputeProjectedArea(const Vertex& faceCenter, const Vertex& normal, const Camera& camera); float ComputeProjectedArea(const Vertex& faceCenter, const Vertex& normal, const Camera& camera);
void FillTextureGaps2(Image8U3& texture, const Pixel8U& colEmpty, int patchSize); void FillTextureGaps2(Image8U3& texture, const Pixel8U& colEmpty, int patchSize);
@ -14496,6 +14525,7 @@ float MeshTexture::ComputeProjectedArea(const Vertex& faceCenter, const Vertex&
return std::max(0.0f, projectionArea); return std::max(0.0f, projectionArea);
} }
// 5. 重新设计GenerateTextureAtlasWith3DBridge
Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge( Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
const LabelArr& faceLabels, const LabelArr& faceLabels,
const IIndexArr& views, const IIndexArr& views,
@ -14506,50 +14536,94 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
Pixel8U colEmpty, Pixel8U colEmpty,
float fSharpnessWeight) float fSharpnessWeight)
{ {
DEBUG_EXTRA("优化版纹理图集生成 - 多视图融合 + 颜色一致性"); DEBUG_EXTRA("优化版纹理图集生成 - 单一视图采样 + 邻域一致性");
TD_TIMER_START(); TD_TIMER_START();
// 1. 计算纹理尺寸 // 1. 验证UV坐标
DEBUG_EXTRA("验证UV坐标...");
int validUVCount = 0;
int outOfRangeUV = 0;
FOREACH(i, scene.mesh.faceTexcoords) {
const TexCoord& uv = scene.mesh.faceTexcoords[i];
if (uv.x < 0.0f || uv.x > 1.0f || uv.y < 0.0f || uv.y > 1.0f) {
outOfRangeUV++;
} else {
validUVCount++;
}
}
DEBUG_EXTRA("UV坐标统计: 有效UV=%d, 超出范围UV=%d, 总数=%zu",
validUVCount, outOfRangeUV, scene.mesh.faceTexcoords.size());
if (validUVCount == 0) {
DEBUG_EXTRA("错误: 没有有效的UV坐标");
return Mesh::Image8U3Arr();
}
// 2. 计算纹理尺寸
AABB2f uvBounds(true); AABB2f uvBounds(true);
bool hasValidUV = false;
FOREACH(i, scene.mesh.faceTexcoords) { FOREACH(i, scene.mesh.faceTexcoords) {
const TexCoord& uv = scene.mesh.faceTexcoords[i]; const TexCoord& uv = scene.mesh.faceTexcoords[i];
if (uv.x >= 0.0f && uv.x <= 1.0f && uv.y >= 0.0f && uv.y <= 1.0f) {
uvBounds.InsertFull(uv); uvBounds.InsertFull(uv);
hasValidUV = true;
} else {
DEBUG_EXTRA("警告: UV坐标超出范围[%d]: (%.4f, %.4f)", i, uv.x, uv.y);
}
} }
if (!hasValidUV) {
DEBUG_EXTRA("错误: 没有有效的UV坐标在[0,1]范围内");
return Mesh::Image8U3Arr();
}
// 计算UV边界框的宽度和高度
float uvWidth = uvBounds.ptMax.x() - uvBounds.ptMin.x(); float uvWidth = uvBounds.ptMax.x() - uvBounds.ptMin.x();
float uvHeight = uvBounds.ptMax.y() - uvBounds.ptMin.y(); float uvHeight = uvBounds.ptMax.y() - uvBounds.ptMin.y();
int textureSize = ComputeOptimalTextureSize(uvWidth, uvHeight, nTextureSizeMultiple);
DEBUG_EXTRA("UV边界: [%.3f,%.3f] -> [%.3f,%.3f]", DEBUG_EXTRA("UV边界框: 宽度=%.4f, 高度=%.4f, 最小=(%.4f,%.4f), 最大=(%.4f,%.4f)",
uvWidth, uvHeight,
uvBounds.ptMin.x(), uvBounds.ptMin.y(), uvBounds.ptMin.x(), uvBounds.ptMin.y(),
uvBounds.ptMax.x(), uvBounds.ptMax.y()); uvBounds.ptMax.x(), uvBounds.ptMax.y());
DEBUG_EXTRA("纹理尺寸: %dx%d", textureSize, textureSize);
// 2. 构建面片可见视图
std::vector<std::vector<IIndex>> faceVisibleViews;
FaceDataViewArr facesDatas;
if (!ListCameraFaces(facesDatas, 0.6f, -1, views, false)) { if (uvWidth <= 0.0f || uvHeight <= 0.0f) {
DEBUG_EXTRA("错误: 无法构建相机-面片关系"); DEBUG_EXTRA("错误: UV边界框无效");
return Mesh::Image8U3Arr(); return Mesh::Image8U3Arr();
} }
BuildFaceViewVisibility(views, facesDatas, faceVisibleViews); int textureSize = ComputeOptimalTextureSize(uvWidth, uvHeight, nTextureSizeMultiple);
DEBUG_EXTRA("计算纹理尺寸: %dx%d (基于UV范围 %.4fx%.4f)",
textureSize, textureSize, uvWidth, uvHeight);
// 2. 为每个面片选择最佳视图(考虑邻域一致性)
std::vector<IIndex> faceBestViews;
std::vector<float> faceConfidences;
SelectBestViewForFaces(views, faceBestViews, faceConfidences);
// 3. 构建面片采样数据 // 3. 构建面片采样数据(从单一视图)
DEBUG_EXTRA("构建面片采样数据..."); DEBUG_EXTRA("构建面片采样数据(单一视图)...");
std::vector<FaceData2> faceSamplesData(scene.mesh.faces.size()); std::vector<FaceData2> faceSamplesData(scene.mesh.faces.size());
#ifdef _USE_OPENMP #ifdef _USE_OPENMP
#pragma omp parallel for schedule(dynamic) #pragma omp parallel for schedule(dynamic)
#endif #endif
for (int_t idxFace = 0; idxFace < (int_t)scene.mesh.faces.size(); ++idxFace) { for (int_t idxFace = 0; idxFace < (int_t)scene.mesh.faces.size(); ++idxFace) {
BuildFaceSamples((FIndex)idxFace, faceLabels, faceSamplesData, textureSize); IIndex bestView = faceBestViews[idxFace];
SampleFaceFromSingleView((FIndex)idxFace, bestView,
faceSamplesData[idxFace], textureSize);
} }
// 4. 生成基础纹理 // 4. 应用邻域一致性优化
DEBUG_EXTRA("应用邻域一致性优化...");
ApplyNeighborhoodConsistency(faceSamplesData, faceBestViews, textureSize);
// 5. 生成纹理
DEBUG_EXTRA("生成纹理...");
Mesh::Image8U3Arr textures; Mesh::Image8U3Arr textures;
GenerateTextureAtlasOptimized(faceSamplesData, textures, textureSize, colEmpty); GenerateTextureFromSingleViewSamples(faceSamplesData, textures,
textureSize, colEmpty);
if (textures.empty()) { if (textures.empty()) {
DEBUG_EXTRA("错误: 无法生成纹理"); DEBUG_EXTRA("错误: 无法生成纹理");
@ -14558,33 +14632,16 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
Image8U3& textureAtlas = textures[0]; Image8U3& textureAtlas = textures[0];
// 5. 应用颜色一致性优化 // 6. 应用简单的后处理
DEBUG_EXTRA("应用多视图颜色一致性优化..."); DEBUG_EXTRA("应用后处理...");
ApplyMultiViewColorOptimization(textureAtlas, faceSamplesData, colEmpty); ApplyConservativeSmoothing(textureAtlas, colEmpty);
// 6. 应用梯度域优化
DEBUG_EXTRA("应用梯度域优化...");
ApplyGradientDomainOptimization(textureAtlas, colEmpty);
// 7. 应用纹理接缝平滑
DEBUG_EXTRA("应用纹理接缝平滑...");
ApplyTextureSeamBlending(textureAtlas, colEmpty);
// 8. 填充空白区域
DEBUG_EXTRA("填充空白区域...");
FillTextureGaps2(textureAtlas, colEmpty, 5);
// 9. 应用锐化
if (fSharpnessWeight > 0) {
DEBUG_EXTRA("应用锐化: 强度=%.2f", fSharpnessWeight);
ApplySoftSharpening(textureAtlas, fSharpnessWeight, colEmpty);
}
// 10. 最终统计 // 7. 统计
int validPixels = 0; int validPixels = 0;
for (int y = 0; y < textureSize; ++y) { for (int y = 0; y < textureSize; ++y) {
for (int x = 0; x < textureSize; ++x) { for (int x = 0; x < textureSize; ++x) {
if (textureAtlas(y, x) != colEmpty) { if (textureAtlas.at<cv::Vec3b>(y, x) !=
cv::Vec3b(colEmpty.b, colEmpty.g, colEmpty.r)) {
validPixels++; validPixels++;
} }
} }
@ -14598,6 +14655,793 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasWith3DBridge(
return textures; return textures;
} }
// 6. 邻域一致性优化
void MeshTexture::ApplyNeighborhoodConsistency(
std::vector<FaceData2>& faceSamplesData,
const std::vector<IIndex>& faceBestViews,
int textureSize)
{
DEBUG_EXTRA("应用邻域一致性优化");
std::vector<std::vector<int>> adjacency;
BuildFaceAdjacency(adjacency);
int numFaces = (int)scene.mesh.faces.size();
int numIterations = 3;
for (int iter = 0; iter < numIterations; ++iter) {
int improvedFaces = 0;
#ifdef _USE_OPENMP
#pragma omp parallel for schedule(dynamic) reduction(+:improvedFaces)
#endif
for (int faceIdx = 0; faceIdx < numFaces; ++faceIdx) {
FaceData2& faceData = faceSamplesData[faceIdx];
if (faceData.samples.empty()) continue;
IIndex currentView = faceBestViews[faceIdx];
cv::Vec3f currentColor = faceData.averageColor;
// 收集相邻面片的颜色
std::vector<cv::Vec3f> neighborColors;
std::vector<float> neighborWeights;
for (int neighborIdx : adjacency[faceIdx]) {
if (neighborIdx < faceSamplesData.size()) {
FaceData2& neighborData = faceSamplesData[neighborIdx];
if (!neighborData.samples.empty() && neighborData.confidence > 0.5f) {
neighborColors.push_back(neighborData.averageColor);
neighborWeights.push_back(neighborData.confidence);
}
}
}
if (neighborColors.size() < 2) continue;
// 计算邻域平均颜色
cv::Vec3f neighborAvg(0, 0, 0);
float totalWeight = 0.0f;
for (size_t i = 0; i < neighborColors.size(); ++i) {
neighborAvg += neighborColors[i] * neighborWeights[i];
totalWeight += neighborWeights[i];
}
if (totalWeight > 0) {
neighborAvg /= totalWeight;
// 计算颜色差异
cv::Vec3f diff = currentColor - neighborAvg;
float colorDiff = std::sqrt(diff.dot(diff));
// 如果差异太大,向邻域平均靠近
if (colorDiff > 20.0f) { // 阈值可以根据需要调整
float blendFactor = 0.3f;
cv::Vec3f adjustedColor = currentColor * (1.0f - blendFactor) +
neighborAvg * blendFactor;
// 调整所有样本颜色
for (auto& sample : faceData.samples) {
sample.color = sample.color * (1.0f - blendFactor) +
neighborAvg * blendFactor;
}
faceData.averageColor = adjustedColor;
improvedFaces++;
}
}
}
DEBUG_EXTRA("邻域一致性迭代 %d: 优化了 %d 个面片",
iter + 1, improvedFaces);
if (improvedFaces == 0) break;
}
}
// Edge antialiasing: finds the boundary of the filled region (Canny on the
// validity mask) and replaces each boundary pixel with the mean of the valid
// pixels in its 3x3 neighborhood, softening staircase artifacts.
void MeshTexture::ApplyAntialiasing(Image8U3& texture, const Pixel8U& colEmpty)
{
    DEBUG_EXTRA("应用抗锯齿处理");
    const int rows = texture.rows;
    const int cols = texture.cols;
    if (rows <= 1 || cols <= 1) return;

    const cv::Vec3b emptyColor(colEmpty.b, colEmpty.g, colEmpty.r);
    Image8U3 smoothed = texture.clone();

    // binary mask of filled (non-background) pixels
    cv::Mat mask(rows, cols, CV_8U, cv::Scalar(0));
    for (int y = 0; y < rows; ++y) {
        const cv::Vec3b* srcRow = texture.ptr<cv::Vec3b>(y);
        uchar* dstRow = mask.ptr<uchar>(y);
        for (int x = 0; x < cols; ++x)
            if (srcRow[x] != emptyColor)
                dstRow[x] = 255;
    }

    // the mask boundary is where the aliasing shows
    cv::Mat edges;
    cv::Canny(mask, edges, 50, 150);

    const int kernelRadius = 1; // 3x3 neighborhood
    for (int y = 0; y < rows; ++y) {
        for (int x = 0; x < cols; ++x) {
            if (edges.at<uchar>(y, x) == 0) continue;
            // boundary pixel: average the valid colors around it
            cv::Vec3f colorSum(0, 0, 0);
            int validCount = 0;
            for (int dy = -kernelRadius; dy <= kernelRadius; ++dy) {
                const int ny = y + dy;
                if (ny < 0 || ny >= rows) continue;
                for (int dx = -kernelRadius; dx <= kernelRadius; ++dx) {
                    const int nx = x + dx;
                    if (nx < 0 || nx >= cols) continue;
                    const cv::Vec3b pixel = texture.at<cv::Vec3b>(ny, nx);
                    if (pixel != emptyColor) {
                        colorSum += cv::Vec3f(pixel[0], pixel[1], pixel[2]);
                        ++validCount;
                    }
                }
            }
            if (validCount > 0) {
                const cv::Vec3f mean = colorSum / (float)validCount;
                smoothed.at<cv::Vec3b>(y, x) = cv::Vec3b(
                    cv::saturate_cast<uchar>(mean[0]),
                    cv::saturate_cast<uchar>(mean[1]),
                    cv::saturate_cast<uchar>(mean[2]));
            }
        }
    }
    smoothed.copyTo(texture);
    DEBUG_EXTRA("抗锯齿处理完成");
}
void MeshTexture::GenerateTextureFromSingleViewSamples(
const std::vector<FaceData2>& faceSamplesData,
Mesh::Image8U3Arr& textures,
int textureSize,
Pixel8U colEmpty)
{
DEBUG_EXTRA("从单一视图采样数据生成纹理");
if (faceSamplesData.empty()) {
DEBUG_EXTRA("错误: 采样数据为空");
return;
}
// 统计采样数据分布
std::vector<int> sampleDistribution(10, 0); // 按采样数量分组
int totalSamples = 0;
int facesWithSamples = 0;
for (size_t i = 0; i < faceSamplesData.size(); ++i) {
int sampleCount = (int)faceSamplesData[i].samples.size();
if (sampleCount > 0) {
facesWithSamples++;
totalSamples += sampleCount;
int group = sampleCount / 10;
if (group >= 10) group = 9;
sampleDistribution[group]++;
}
}
DEBUG_EXTRA("采样数据统计: 有采样数据的面片=%d/%zu (%.1f%%)",
facesWithSamples, faceSamplesData.size(),
(float)facesWithSamples * 100 / faceSamplesData.size());
DEBUG_EXTRA("总采样数: %d, 平均每个面片=%.1f",
totalSamples, (float)totalSamples / facesWithSamples);
// 输出采样分布
for (int i = 0; i < 10; ++i) {
if (sampleDistribution[i] > 0) {
DEBUG_EXTRA("采样数 %d-%d: %d 个面片",
i*10, (i+1)*10-1, sampleDistribution[i]);
}
}
// 检查前几个面片的详细信息
for (int i = 0; i < 20; ++i) {
if (i >= faceSamplesData.size()) break;
const FaceData2& faceData = faceSamplesData[i];
if (faceData.samples.empty()) {
DEBUG_EXTRA("面片 %d: 无采样数据, bestView=%d, confidence=%.4f",
i, faceData.bestView, faceData.confidence);
} else {
DEBUG_EXTRA("面片 %d: 采样数=%zu, bestView=%d, confidence=%.4f, "
"平均颜色=(%.1f,%.1f,%.1f)",
i, faceData.samples.size(), faceData.bestView, faceData.confidence,
faceData.averageColor[0], faceData.averageColor[1], faceData.averageColor[2]);
}
}
// 1. 创建纹理
Image8U3 texture;
texture.create(textureSize, textureSize);
// 填充背景色
for (int y = 0; y < textureSize; ++y) {
Pixel8U* row = texture.ptr<Pixel8U>(y);
for (int x = 0; x < textureSize; ++x) {
row[x] = colEmpty;
}
}
textures.resize(1);
textures[0] = texture;
// 2. 为每个面片填充纹理
int filledPixels = 0;
int totalFaces = (int)scene.mesh.faces.size();
int facesProcessed = 0;
int facesSkippedNoUV = 0;
int facesSkippedNoSamples = 0;
// 检查UV坐标
if (scene.mesh.faceTexcoords.empty()) {
DEBUG_EXTRA("错误: mesh.faceTexcoords 为空!");
return;
}
if (scene.mesh.faceTexcoords.size() != scene.mesh.faces.size() * 3) {
DEBUG_EXTRA("错误: UV坐标数量不匹配! faces=%zu, UVs=%zu",
scene.mesh.faces.size(), scene.mesh.faceTexcoords.size());
return;
}
#ifdef _USE_OPENMP
#pragma omp parallel for schedule(dynamic) \
reduction(+:filledPixels) \
reduction(+:facesProcessed) \
reduction(+:facesSkippedNoUV) \
reduction(+:facesSkippedNoSamples)
#endif
for (int faceIdx = 0; faceIdx < totalFaces; ++faceIdx) {
facesProcessed++;
const FaceData2& faceData = faceSamplesData[faceIdx];
if (faceData.samples.empty()) {
facesSkippedNoSamples++;
continue;
}
// 获取UV坐标
if (faceIdx * 3 + 2 >= (int)scene.mesh.faceTexcoords.size()) {
facesSkippedNoUV++;
continue;
}
const TexCoord* meshUVs = &scene.mesh.faceTexcoords[faceIdx * 3];
// 计算UV边界
float minU = FLT_MAX, maxU = -FLT_MAX;
float minV = FLT_MAX, maxV = -FLT_MAX;
for (int i = 0; i < 3; ++i) {
float u = meshUVs[i].x;
float v = meshUVs[i].y;
minU = std::min(minU, u);
maxU = std::max(maxU, u);
minV = std::min(minV, v);
maxV = std::max(maxV, v);
}
// 检查UV是否在有效范围内
if (minU < 0.0f || maxU > 1.0f || minV < 0.0f || maxV > 1.0f) {
#ifdef _USE_OPENMP
#pragma omp critical
#endif
{
if (faceIdx < 20) { // 只输出前20个用于调试
DEBUG_EXTRA("面片 %d UV超出范围: U=[%.4f,%.4f], V=[%.4f,%.4f]",
faceIdx, minU, maxU, minV, maxV);
}
}
continue;
}
// 转换为纹理像素坐标
int startX = (int)floor(minU * textureSize);
int endX = (int)ceil(maxU * textureSize);
int startY = (int)floor(minV * textureSize);
int endY = (int)ceil(maxV * textureSize);
// 限制在纹理范围内
startX = std::max(0, startX);
endX = std::min(endX, textureSize - 1);
startY = std::max(0, startY);
endY = std::min(endY, textureSize - 1);
if (startX > endX || startY > endY) {
continue;
}
// 计算三角形在纹理空间中的重心坐标
// 注意:OpenCV图像坐标系Y轴向下,UV坐标系Y轴向上,需要翻转V坐标
cv::Point2f uv0(meshUVs[0].x, 1.0f - meshUVs[0].y);
cv::Point2f uv1(meshUVs[1].x, 1.0f - meshUVs[1].y);
cv::Point2f uv2(meshUVs[2].x, 1.0f - meshUVs[2].y);
int facePixels = 0;
for (int y = startY; y <= endY; ++y) {
Pixel8U* row = texture.ptr<Pixel8U>(y);
for (int x = startX; x <= endX; ++x) {
// 像素中心坐标
cv::Point2f texCoord(
(x + 0.5f) / textureSize,
(y + 0.5f) / textureSize
);
// 使用重心坐标判断点是否在三角形内
cv::Point2f v0 = uv1 - uv0;
cv::Point2f v1 = uv2 - uv0;
cv::Point2f v2 = texCoord - uv0;
float dot00 = v0.dot(v0);
float dot01 = v0.dot(v1);
float dot11 = v1.dot(v1);
float dot20 = v2.dot(v0);
float dot21 = v2.dot(v1);
float denom = dot00 * dot11 - dot01 * dot01;
if (fabs(denom) < 1e-10f) continue;
float invDenom = 1.0f / denom;
float alpha = (dot11 * dot20 - dot01 * dot21) * invDenom;
float beta = (dot00 * dot21 - dot01 * dot20) * invDenom;
float gamma = 1.0f - alpha - beta;
// 检查点是否在三角形内
if (alpha >= -1e-6f && beta >= -1e-6f && gamma >= -1e-6f) {
// 在三角形内,使用平均颜色
Pixel8U color;
color.r = (unsigned char)cv::saturate_cast<uchar>(faceData.averageColor[2]);
color.g = (unsigned char)cv::saturate_cast<uchar>(faceData.averageColor[1]);
color.b = (unsigned char)cv::saturate_cast<uchar>(faceData.averageColor[0]);
// 检查颜色是否有效
if (color.r == 0 && color.g == 0 && color.b == 0) {
// 黑色可能是无效颜色,跳过
continue;
}
row[x] = color;
facePixels++;
}
}
}
if (facePixels > 0) {
filledPixels += facePixels;
if (faceIdx < 20) { // 只输出前20个面片用于调试
#ifdef _USE_OPENMP
#pragma omp critical
#endif
{
DEBUG_EXTRA("面片 %d: 填充了 %d 像素, UV范围: U=[%.4f,%.4f], V=[%.4f,%.4f], "
"像素范围: [%d,%d]x[%d,%d]",
faceIdx, facePixels, minU, maxU, minV, maxV,
startX, endX, startY, endY);
}
}
}
}
DEBUG_EXTRA("纹理填充统计: 处理了 %d 个面片, 跳过了 %d 个无采样面片, %d 个无UV面片",
facesProcessed, facesSkippedNoSamples, facesSkippedNoUV);
DEBUG_EXTRA("纹理填充完成: %d 像素 (%.2f%%)",
filledPixels, (float)filledPixels * 100 / (textureSize * textureSize));
// 3. 如果需要,可以应用抗锯齿
if (filledPixels > 0) {
ApplyAntialiasing(texture, colEmpty);
} else {
DEBUG_EXTRA("警告: 纹理中没有填充任何像素");
// // 保存纹理以进行调试
// std::string debugPath = "debug_texture.png";
// cv::Mat cvTexture(texture.rows, texture.cols, CV_8UC3, texture.data());
// cv::cvtColor(cvTexture, cvTexture, cv::COLOR_RGB2BGR);
// cv::imwrite(debugPath, cvTexture);
// DEBUG_EXTRA("调试: 保存空白纹理到 %s", debugPath.c_str());
}
}
// 7. 保守的平滑处理
// Conservative smoothing: blends 10% of a 3x3 Gaussian blur into valid,
// non-edge pixels, reducing sampling noise without washing out detail or
// smearing colors across the atlas boundaries.
//
// @param texture   atlas to smooth in place
// @param colEmpty  background color identifying unpainted texels
void MeshTexture::ApplyConservativeSmoothing(Image8U3& texture,
    const Pixel8U& colEmpty)
{
    DEBUG_EXTRA("应用保守平滑处理");
    if (texture.empty()) return;

    // hoisted out of the per-pixel loop (was constructed for every texel)
    const cv::Vec3b emptyColor(colEmpty.b, colEmpty.g, colEmpty.r);

    // mask of valid (non-background) pixels
    cv::Mat mask(texture.rows, texture.cols, CV_8U, cv::Scalar(0));
    for (int y = 0; y < texture.rows; ++y) {
        const cv::Vec3b* row = texture.ptr<cv::Vec3b>(y);
        uchar* maskRow = mask.ptr<uchar>(y);
        for (int x = 0; x < texture.cols; ++x)
            if (row[x] != emptyColor)
                maskRow[x] = 255;
    }

    // mild blur, applied only away from the mask boundary below
    cv::Mat blurred;
    cv::GaussianBlur(texture, blurred, cv::Size(3, 3), 0.5);

    // mask boundary = edges we must not smear
    cv::Mat edges;
    cv::Canny(mask, edges, 50, 150);

    for (int y = 0; y < texture.rows; ++y) {
        cv::Vec3b* texRow = texture.ptr<cv::Vec3b>(y);
        const cv::Vec3b* blurRow = blurred.ptr<cv::Vec3b>(y);
        const uchar* edgeRow = edges.ptr<uchar>(y);
        const uchar* maskRow = mask.ptr<uchar>(y);
        for (int x = 0; x < texture.cols; ++x) {
            if (maskRow[x] && !edgeRow[x]) { // valid, non-edge pixel
                // 90% original + 10% blur
                for (int c = 0; c < 3; ++c) {
                    const float value = texRow[x][c] * 0.9f + blurRow[x][c] * 0.1f;
                    texRow[x][c] = cv::saturate_cast<uchar>(value);
                }
            }
        }
    }
}
// 1. 为每个面片选择最佳视图(基于全局一致性)
void MeshTexture::SelectBestViewForFaces(
const IIndexArr& views,
std::vector<IIndex>& faceBestViews,
std::vector<float>& faceConfidences)
{
DEBUG_EXTRA("为每个面片选择最佳视图(全局一致性)");
int totalFaces = (int)scene.mesh.faces.size();
faceBestViews.resize(totalFaces, NO_ID);
faceConfidences.resize(totalFaces, 0.0f);
int facesWithView = 0;
#ifdef _USE_OPENMP
#pragma omp parallel for schedule(dynamic) reduction(+:facesWithView)
#endif
for (int_t idxFace = 0; idxFace < totalFaces; ++idxFace) {
FIndex faceID = (FIndex)idxFace;
// 为每个面片选择最佳视图
float bestScore = -1.0f;
IIndex bestView = NO_ID;
for (IIndex viewIdx : views) {
if (viewIdx >= images.size()) continue;
const Image& image = images[viewIdx];
if (image.image.empty()) continue;
// 计算面片中心
const Face& face = scene.mesh.faces[faceID];
Vertex faceCenter(0, 0, 0);
for (int v = 0; v < 3; ++v) {
faceCenter += vertices[face[v]];
}
faceCenter /= 3.0f;
// 计算面法线
Vertex v0 = vertices[face[1]] - vertices[face[0]];
Vertex v1 = vertices[face[2]] - vertices[face[0]];
Vertex normal = v0.cross(v1);
float len = sqrtf(normal.dot(normal)); // 通过点积计算
if (len > 0.0f) {
normal /= len;
}
// 计算视图分数
float score = ComputeViewScore(image, faceCenter, normal);
if (score > bestScore) {
bestScore = score;
bestView = viewIdx;
}
}
if (bestView != NO_ID && bestScore > 0.1f) { // 阈值可以根据需要调整
faceBestViews[faceID] = bestView;
faceConfidences[faceID] = bestScore;
facesWithView++;
}
// 调试输出前几个面片
if (faceID < 20) {
DEBUG_EXTRA("面片 %d: 最佳视图=%d, 分数=%.4f",
faceID, bestView, bestScore);
}
}
DEBUG_EXTRA("视图选择完成: %d/%d 个面有最佳视图 (%.1f%%)",
facesWithView, totalFaces,
(float)facesWithView * 100 / totalFaces);
}
// 2. 图割优化视图选择
void MeshTexture::ApplyGraphCutViewSelection(
const std::vector<std::vector<float>>& viewScores,
std::vector<IIndex>& faceBestViews,
std::vector<float>& faceConfidences)
{
DEBUG_EXTRA("应用图割优化视图选择");
int numFaces = (int)scene.mesh.faces.size();
int numViews = images.size();
// 计算相邻面片
std::vector<std::vector<int>> adjacency(numFaces);
BuildFaceAdjacency(adjacency);
// 图割参数
float smoothWeight = 1.0f; // 平滑项权重
float dataWeight = 2.0f; // 数据项权重
// 初始化:每个面片选择分数最高的视图
for (int faceIdx = 0; faceIdx < numFaces; ++faceIdx) {
float bestScore = 0.0f;
IIndex bestView = NO_ID;
for (int viewIdx = 0; viewIdx < numViews; ++viewIdx) {
float score = viewScores[faceIdx][viewIdx];
if (score > bestScore) {
bestScore = score;
bestView = viewIdx;
}
}
faceBestViews[faceIdx] = bestView;
faceConfidences[faceIdx] = bestScore;
}
// 迭代优化:确保相邻面片选择相同视图
int maxIterations = 10;
float improvementThreshold = 0.01f;
for (int iter = 0; iter < maxIterations; ++iter) {
int changes = 0;
for (int faceIdx = 0; faceIdx < numFaces; ++faceIdx) {
if (faceConfidences[faceIdx] < 0.1f) continue;
IIndex currentView = faceBestViews[faceIdx];
float currentScore = faceConfidences[faceIdx];
// 检查相邻面片
std::unordered_map<IIndex, int> neighborViews;
for (int neighborIdx : adjacency[faceIdx]) {
IIndex neighborView = faceBestViews[neighborIdx];
if (neighborView != NO_ID) {
neighborViews[neighborView]++;
}
}
// 计算平滑项
float smoothScore = 0.0f;
IIndex mostCommonView = NO_ID;
int maxCount = 0;
for (const auto& [view, count] : neighborViews) {
if (count > maxCount) {
maxCount = count;
mostCommonView = view;
}
}
if (mostCommonView != NO_ID && mostCommonView != currentView) {
// 考虑切换到相邻面片最常见的视图
float alternativeScore = viewScores[faceIdx][mostCommonView];
float smoothBenefit = smoothWeight * (maxCount / (float)adjacency[faceIdx].size());
if (alternativeScore + smoothBenefit > currentScore) {
faceBestViews[faceIdx] = mostCommonView;
faceConfidences[faceIdx] = alternativeScore;
changes++;
}
}
}
DEBUG_EXTRA("图割迭代 %d: 改变了 %d 个面片的视图", iter + 1, changes);
if (changes == 0) break;
}
}
// 3. 构建面片邻接关系
void MeshTexture::BuildFaceAdjacency(std::vector<std::vector<int>>& adjacency)
{
int numFaces = (int)scene.mesh.faces.size();
adjacency.resize(numFaces);
// 构建边-面片映射
std::map<std::pair<int, int>, std::vector<int>> edgeFaces;
for (int faceIdx = 0; faceIdx < numFaces; ++faceIdx) {
const Face& face = scene.mesh.faces[faceIdx];
for (int i = 0; i < 3; ++i) {
int v1 = face[i];
int v2 = face[(i + 1) % 3];
int minV = std::min(v1, v2);
int maxV = std::max(v1, v2);
edgeFaces[{minV, maxV}].push_back(faceIdx);
}
}
// 通过共享边构建邻接
for (const auto& [edge, faces] : edgeFaces) {
if (faces.size() == 2) {
int f1 = faces[0];
int f2 = faces[1];
adjacency[f1].push_back(f2);
adjacency[f2].push_back(f1);
}
}
}
// 4. 为每个面片从单一视图采样
// Samples a face's color from a single source view: projects random points on
// the triangle into the view's image and averages the colors found there.
//
// @param idxFace      face to sample
// @param viewIdx      source view index (NO_ID leaves faceData empty)
// @param faceData     output: samples (colors stored as RGB), averageColor
//                     (RGB), bestView and confidence
// @param textureSize  unused here; kept for interface compatibility
void MeshTexture::SampleFaceFromSingleView(FIndex idxFace, IIndex viewIdx,
    FaceData2& faceData, int textureSize)
{
    faceData.samples.clear();
    faceData.averageColor = cv::Vec3f(0, 0, 0);
    faceData.bestView = viewIdx;
    faceData.confidence = 0.0f;

    if (viewIdx == NO_ID)
        return; // no view selected for this face

    const Image& imageData = images[viewIdx];
    if (imageData.image.empty()) {
        DEBUG_EXTRA("面片 %d: 视图 %d 图像为空", idxFace, viewIdx);
        return;
    }

    const Face& face = scene.mesh.faces[idxFace];
    const Point3f& v0 = vertices[face[0]];
    const Point3f& v1 = vertices[face[1]];
    const Point3f& v2 = vertices[face[2]];

    const Point3f center = (v0 + v1 + v2) / 3.0f;
    const Point3d center_double(center.x, center.y, center.z);

    const Camera& camera = imageData.camera;
    // BUGFIX: the world->camera transform in MVS is R*(X-C), not R*X+C;
    // the old formula produced a wrong depth for the in-front test
    const Point3d centerCam = camera.TransformPointW2C(center_double);
    if (centerCam.z <= 0)
        return; // face center behind the camera

    // reject faces whose center does not project into the image
    const Point2f proj = camera.TransformPointW2I(center_double);
    if (proj.x < 0 || proj.x >= imageData.image.width() ||
        proj.y < 0 || proj.y >= imageData.image.height())
        return;

    if (scene.mesh.faceTexcoords.empty()) {
        DEBUG_EXTRA("错误: 面片 %d 没有UV坐标", idxFace);
        return;
    }

    // Deterministic per-face pseudo-random generator: rand() is neither
    // thread-safe (this runs inside OpenMP loops) nor reproducible between
    // runs. A tiny LCG seeded by the face index fixes both without any new
    // dependency.
    unsigned rngState = 2654435761u * ((unsigned)idxFace + 1u);
    const auto nextUnit = [&rngState]() -> float {
        rngState = rngState * 1664525u + 1013904223u; // Numerical Recipes LCG
        return (float)(rngState >> 8) / 16777216.0f;  // uniform in [0,1)
    };

    int sampleCount = 0;
    cv::Vec3f sumColor(0, 0, 0);
    const int numSamples = 10;
    for (int i = 0; i < numSamples; ++i) {
        // uniform barycentric sample on the triangle (fold-back trick)
        float a = nextUnit();
        float b = nextUnit();
        if (a + b > 1.0f) {
            a = 1.0f - a;
            b = 1.0f - b;
        }
        const float c = 1.0f - a - b;

        const Point3f samplePt_f = v0 * a + v1 * b + v2 * c;
        const Point3d samplePt(samplePt_f.x, samplePt_f.y, samplePt_f.z);

        const Point2f imgPt = camera.TransformPointW2I(samplePt);
        const int x = (int)imgPt.x;
        const int y = (int)imgPt.y;
        if (x < 0 || x >= imageData.image.width() ||
            y < 0 || y >= imageData.image.height())
            continue; // sample falls outside the view

        const Pixel8U& pixel = imageData.image(y, x);
        TextureSample sample;
        sample.color = cv::Vec3f(pixel.r, pixel.g, pixel.b); // stored as RGB
        faceData.samples.push_back(sample);

        sumColor += cv::Vec3f(pixel.r, pixel.g, pixel.b);
        ++sampleCount;
    }

    if (sampleCount > 0) {
        faceData.averageColor = sumColor / (float)sampleCount;
        // confidence = fraction of samples that landed inside the image
        faceData.confidence = (float)sampleCount / numSamples;
    }

    if (idxFace < 10) { // debug the first few faces
        DEBUG_EXTRA("面片 %d 采样完成: %d 个有效样本, 视图 %d, 平均颜色 (%.1f,%.1f,%.1f)",
            idxFace, sampleCount, viewIdx,
            faceData.averageColor[2], faceData.averageColor[1], faceData.averageColor[0]);
    }
}
void MeshTexture::ApplyGlobalColorCorrection(Image8U3& texture, Pixel8U colEmpty, float strength) { void MeshTexture::ApplyGlobalColorCorrection(Image8U3& texture, Pixel8U colEmpty, float strength) {
if (strength <= 0) return; if (strength <= 0) return;

Loading…
Cancel
Save