Browse Source

UV分割空白的地方填充

ManualUV
hesuicong 1 month ago
parent
commit
c70d12b645
  1. 300
      libs/MVS/SceneTexture.cpp

300
libs/MVS/SceneTexture.cpp

@ -9705,6 +9705,222 @@ Point2f MeshTexture::ProjectPointRobust(const Camera& camera, const Vertex& worl
return imgPoint; return imgPoint;
} }
// 辅助函数:计算点到线段的最短距离
// Helper: project 2D point p onto segment [a,b] and return the closest point
// lying ON the segment (the projection parameter is clamped to [0,1]).
Point2f ProjectPointToLineSegment(const Point2f& p, const Point2f& a, const Point2f& b) {
	const Point2f ab = b - a;
	const Point2f ap = p - a;
	const float denom = ab.dot(ab);
	// Degenerate segment (a == b): the closest point is the endpoint itself.
	// The original code divided by zero here, producing NaN coordinates.
	if (denom <= 0.0f)
		return a;
	float t = ap.dot(ab) / denom;
	t = std::max(0.0f, std::min(1.0f, t));
	return a + ab * t;
}
// 辅助函数:从2D点计算重心坐标
// Helper: barycentric coordinates (u,v,w) of 2D point p w.r.t. triangle
// (a,b,c), such that p = u*a + v*b + w*c and u+v+w = 1.
// For a degenerate (zero-area) triangle an out-of-range coordinate is
// returned so that PointInTriangle() rejects the point instead of
// propagating NaNs (the original divided by denom == 0).
Point3f BarycentricFromPoint(const Point2f& p, const Point2f& a, const Point2f& b, const Point2f& c) {
	const Point2f v0 = b - a;
	const Point2f v1 = c - a;
	const Point2f v2 = p - a;
	const float d00 = v0.dot(v0);
	const float d01 = v0.dot(v1);
	const float d11 = v1.dot(v1);
	const float d20 = v2.dot(v0);
	const float d21 = v2.dot(v1);
	const float denom = d00 * d11 - d01 * d01;
	// Zero-area triangle: no valid barycentric decomposition exists.
	if (std::abs(denom) < std::numeric_limits<float>::epsilon())
		return Point3f(-1.0f, -1.0f, -1.0f);
	const float v = (d11 * d20 - d01 * d21) / denom;
	const float w = (d00 * d21 - d01 * d20) / denom;
	const float u = 1.0f - v - w;
	return Point3f(u, v, w);
}
// 辅助函数:判断点是否在三角形内
// Helper: test whether 2D point p lies inside triangle (a,b,c).
// On return, 'barycentric' holds p's barycentric coordinates w.r.t. the
// triangle (valid regardless of whether the point is inside).
bool PointInTriangle(const Point2f& p, const Point2f& a, const Point2f& b, const Point2f& c, Point3f& barycentric) {
	barycentric = BarycentricFromPoint(p, a, b, c);
	// Inside iff every barycentric component lies in [0,1].
	const bool uInside = barycentric.x >= 0 && barycentric.x <= 1;
	const bool vInside = barycentric.y >= 0 && barycentric.y <= 1;
	const bool wInside = barycentric.z >= 0 && barycentric.z <= 1;
	return uInside && vInside && wInside;
}
// 辅助函数:从图像中双线性插值采样颜色
// Helper: bilinearly sample an 8-bit 3-channel image at a sub-pixel point.
// Points outside the image return black (0,0,0).
Pixel8U SampleImageBilinear(const Image8U3& image, const Point2f& point) {
	// Use floor (not int truncation): truncation maps e.g. -0.5 to cell 0 with
	// a negative interpolation weight instead of being rejected as outside.
	const int x = (int)std::floor(point.x);
	const int y = (int)std::floor(point.y);
	// Reject points outside the image.
	if (x < 0 || x >= image.cols || y < 0 || y >= image.rows) {
		return Pixel8U(0, 0, 0);
	}
	// Clamp the neighbor cell at the border so the last row/column can still
	// be sampled (the original rejected x == cols-1 / y == rows-1 outright
	// and returned black for those valid positions).
	const int x1 = std::min(x + 1, image.cols - 1);
	const int y1 = std::min(y + 1, image.rows - 1);
	const float dx = point.x - (float)x;
	const float dy = point.y - (float)y;
	const Pixel8U& p00 = image(y, x);
	const Pixel8U& p01 = image(y, x1);
	const Pixel8U& p10 = image(y1, x);
	const Pixel8U& p11 = image(y1, x1);
	// Interpolate each channel horizontally, then vertically.
	Pixel8U result;
	for (int c = 0; c < 3; ++c) {
		const float top    = p00[c] * (1 - dx) + p01[c] * dx;
		const float bottom = p10[c] * (1 - dx) + p11[c] * dx;
		result[c] = (uint8_t)(top * (1 - dy) + bottom * dy);
	}
	return result;
}
// Fill the empty gaps between UV charts in the texture atlas so that texture
// filtering at chart borders does not bleed the background color onto the mesh.
// Three passes: (1) build a per-texel distance field to the nearest labeled
// triangle edge, (2) two dilation iterations that average 3x3 filled neighbors
// into nearby empty texels, (3) OpenCV inpainting for whatever remains empty.
// 'textureAtlas' is modified in place; 'colEmpty' is the background color that
// marks unfilled texels; 'textureSize' is the atlas width/height in pixels.
// NOTE(review): 'views' is accepted but never read here — confirm it can be dropped.
void FillTextureGaps(Image8U3& textureAtlas, const Mesh::TexCoordArr& faceTexcoords,
FIndex nFaces, const MeshTexture::LabelArr& faceLabels,
const IIndexArr& views, int textureSize, Pixel8U colEmpty)
{
DEBUG_EXTRA("开始填充纹理间隙...");
// Distance field: for each texel, the smallest distance (in normalized UV
// units) to any labeled triangle's edges, plus the index of that triangle.
cv::Mat distanceField(textureSize, textureSize, CV_32FC1, cv::Scalar(std::numeric_limits<float>::max()));
cv::Mat nearestFaceIdx(textureSize, textureSize, CV_32SC1, cv::Scalar(-1));
// 1. Compute the distance field.
for (FIndex idxFace = 0; idxFace < nFaces; ++idxFace) {
if (faceLabels[idxFace] == 0) continue; // skip unlabeled faces (no texture source)
const TexCoord* uvCoords = &faceTexcoords[idxFace * 3];
// Pixel-space bounding box of the triangle
// (assumes texcoords are normalized to [0,1] — TODO confirm with caller).
int minX = textureSize, maxX = 0;
int minY = textureSize, maxY = 0;
for (int i = 0; i < 3; ++i) {
int px = std::max(0, std::min(textureSize - 1, (int)(uvCoords[i].x * textureSize)));
int py = std::max(0, std::min(textureSize - 1, (int)(uvCoords[i].y * textureSize)));
minX = std::min(minX, px);
maxX = std::max(maxX, px);
minY = std::min(minY, py);
maxY = std::max(maxY, py);
}
// Expand the bounding box so gap texels around the triangle are visited too.
const int margin = 3; // expand by 3 pixels
minX = std::max(0, minX - margin);
maxX = std::min(textureSize - 1, maxX + margin);
minY = std::max(0, minY - margin);
maxY = std::min(textureSize - 1, maxY + margin);
for (int y = minY; y <= maxY; ++y) {
for (int x = minX; x <= maxX; ++x) {
Point2f texCoord((float)x / textureSize, (float)y / textureSize);
// Shortest distance from this texel to the triangle,
float minDist = std::numeric_limits<float>::max();
// measured against each of its three edges.
for (int i = 0; i < 3; ++i) {
const Point2f& p1 = uvCoords[i];
const Point2f& p2 = uvCoords[(i + 1) % 3];
Point2f proj = ProjectPointToLineSegment(texCoord, p1, p2);
float dist = cv::norm(texCoord - proj);
minDist = std::min(minDist, dist);
}
// Keep the closest face seen so far for this texel.
float& currentDist = distanceField.at<float>(y, x);
if (minDist < currentDist) {
currentDist = minDist;
nearestFaceIdx.at<int>(y, x) = idxFace;
}
}
}
}
// 2. Fill empty texels near triangles by averaging their filled neighbors.
const float maxFillDistance = 5.0f / textureSize; // max fill distance in UV units (~5 texels)
for (int iteration = 0; iteration < 2; ++iteration) {
int filledCount = 0;
for (int y = 0; y < textureSize; ++y) {
for (int x = 0; x < textureSize; ++x) {
// Only process texels still holding the empty/background color.
Pixel8U currentPixel = textureAtlas(y, x);
if (currentPixel[0] == colEmpty[0] &&
currentPixel[1] == colEmpty[1] &&
currentPixel[2] == colEmpty[2]) {
int nearestFace = nearestFaceIdx.at<int>(y, x);
float dist = distanceField.at<float>(y, x);
if (nearestFace >= 0 && dist <= maxFillDistance) {
// Accumulate all non-empty pixels found nearby.
bool foundColor = false;
float sumB = 0.0f, sumG = 0.0f, sumR = 0.0f;
int count = 0;
// Scan the 3x3 neighborhood.
for (int dy = -1; dy <= 1; ++dy) {
for (int dx = -1; dx <= 1; ++dx) {
int ny = y + dy;
int nx = x + dx;
if (ny >= 0 && ny < textureSize && nx >= 0 && nx < textureSize) {
Pixel8U neighborPixel = textureAtlas(ny, nx);
if (neighborPixel[0] != colEmpty[0] ||
neighborPixel[1] != colEmpty[1] ||
neighborPixel[2] != colEmpty[2]) {
// NOTE: OpenCV Mats store channels in BGR order.
sumB += static_cast<float>(neighborPixel[0]);
sumG += static_cast<float>(neighborPixel[1]);
sumR += static_cast<float>(neighborPixel[2]);
count++;
foundColor = true;
}
}
}
}
if (foundColor && count > 0) {
// Average the accumulated neighbor colors (rounded to nearest).
Pixel8U avgColor;
avgColor[0] = static_cast<unsigned char>(sumB / count + 0.5f); // B
avgColor[1] = static_cast<unsigned char>(sumG / count + 0.5f); // G
avgColor[2] = static_cast<unsigned char>(sumR / count + 0.5f); // R
textureAtlas(y, x) = avgColor;
filledCount++;
}
}
}
}
}
DEBUG_EXTRA("填充迭代 %d: 填充了 %d 个像素", iteration + 1, filledCount);
if (filledCount == 0) break; // nothing left to dilate into
}
// 3. Inpaint any remaining empty texels (Telea algorithm, radius 3).
cv::Mat mask = cv::Mat::zeros(textureSize, textureSize, CV_8UC1);
for (int y = 0; y < textureSize; ++y) {
for (int x = 0; x < textureSize; ++x) {
Pixel8U pixel = textureAtlas(y, x);
if (pixel[0] == colEmpty[0] && pixel[1] == colEmpty[1] && pixel[2] == colEmpty[2]) {
mask.at<unsigned char>(y, x) = 255;
}
}
}
if (cv::countNonZero(mask) > 0) {
cv::Mat textureMat = textureAtlas; // cv::Mat shares the buffer (ref-counted), no deep copy
cv::Mat inpaintResult;
cv::inpaint(textureMat, mask, inpaintResult, 3, cv::INPAINT_TELEA);
inpaintResult.copyTo(textureAtlas);
}
DEBUG_EXTRA("纹理间隙填充完成");
}
Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasFromUV(const LabelArr& faceLabels, const IIndexArr& views, Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasFromUV(const LabelArr& faceLabels, const IIndexArr& views,
unsigned nTextureSizeMultiple, Pixel8U colEmpty, float fSharpnessWeight) unsigned nTextureSizeMultiple, Pixel8U colEmpty, float fSharpnessWeight)
{ {
@ -9726,6 +9942,7 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasFromUV(const LabelArr& faceLa
DEBUG_EXTRA("生成纹理图集: 尺寸=%dx%d, UV范围=[%.3f,%.3f]-[%.3f,%.3f]", DEBUG_EXTRA("生成纹理图集: 尺寸=%dx%d, UV范围=[%.3f,%.3f]-[%.3f,%.3f]",
textureSize, textureSize, textureSize, textureSize,
uvBounds.ptMin.x(), uvBounds.ptMin.y(), uvBounds.ptMax.x(), uvBounds.ptMax.y()); uvBounds.ptMin.x(), uvBounds.ptMin.y(), uvBounds.ptMax.x(), uvBounds.ptMax.y());
// 4. 为每个面片采样颜色并填充纹理图集 // 4. 为每个面片采样颜色并填充纹理图集
#ifdef _USE_OPENMP #ifdef _USE_OPENMP
#pragma omp parallel for schedule(dynamic) #pragma omp parallel for schedule(dynamic)
@ -9762,12 +9979,10 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasFromUV(const LabelArr& faceLa
for (int x = startX; x <= endX; ++x) { for (int x = startX; x <= endX; ++x) {
const Point2f texCoord((float)x / textureSize, (float)y / textureSize); const Point2f texCoord((float)x / textureSize, (float)y / textureSize);
// 检查是否在三角形内 // 1. 检查是否在三角形内
Point3f barycentric; Point3f barycentric;
if (!PointInTriangle(texCoord, uvCoords[0], uvCoords[1], uvCoords[2], barycentric)) { if (PointInTriangle(texCoord, uvCoords[0], uvCoords[1], uvCoords[2], barycentric)) {
continue; // 标准内部采样
}
// 计算3D空间中的对应点 // 计算3D空间中的对应点
const Vertex worldPoint = const Vertex worldPoint =
vertices[face[0]] * barycentric.x + vertices[face[0]] * barycentric.x +
@ -9775,25 +9990,22 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasFromUV(const LabelArr& faceLa
vertices[face[2]] * barycentric.z; vertices[face[2]] * barycentric.z;
// 将3D点投影到源图像 // 将3D点投影到源图像
// Point2f imgPoint = sourceImage.camera.ProjectPointP(worldPoint);
// Point2f imgPoint = ProjectPointRobust(sourceImage.camera, worldPoint, sourceImage, 0.02f);
Point2f imgPoint = ProjectPointWithAutoCorrection(sourceImage.camera, worldPoint, sourceImage); Point2f imgPoint = ProjectPointWithAutoCorrection(sourceImage.camera, worldPoint, sourceImage);
if (!ValidateProjection(worldPoint, sourceImage, imgPoint)) // 验证投影的有效性
{ if (!ValidateProjection(worldPoint, sourceImage, imgPoint)) {
continue; // 跳过几何不一致的采样点 continue; // 跳过几何不一致的采样点
} }
// 检查投影是否在图像边界内
if (imgPoint.x < -100 || imgPoint.x > sourceImage.image.cols + 100 || if (imgPoint.x < -100 || imgPoint.x > sourceImage.image.cols + 100 ||
imgPoint.y < -100 || imgPoint.y > sourceImage.image.rows + 100) { imgPoint.y < -100 || imgPoint.y > sourceImage.image.rows + 100) {
// 投影异常,记录日志用于调试 // 投影异常,记录日志用于调试
DEBUG_EXTRA("异常投影: 图像点(%.1f,%.1f) 超出图像范围(%dx%d)", DEBUG_EXTRA("异常投影: 图像点(%.1f,%.1f) 超出图像范围(%dx%d)",
imgPoint.x, imgPoint.y, sourceImage.image.cols, sourceImage.image.rows); imgPoint.x, imgPoint.y, sourceImage.image.cols, sourceImage.image.rows);
continue;
} }
// imgPoint.y += sourceImage.image.rows * 0.015f;
// 检查投影有效性 // 检查投影有效性
if (!sourceImage.image.isInside(imgPoint) || if (!sourceImage.image.isInside(imgPoint) ||
!sourceImage.camera.IsInFront(worldPoint)) { !sourceImage.camera.IsInFront(worldPoint)) {
@ -9811,23 +10023,79 @@ Mesh::Image8U3Arr MeshTexture::GenerateTextureAtlasFromUV(const LabelArr& faceLa
textureAtlas(y, x) = sampledColor; textureAtlas(y, x) = sampledColor;
} }
} }
// 2. 如果不在三角形内,检查是否在三角形边缘附近
else {
// 计算到三角形边缘的最近距离
float minDist = std::numeric_limits<float>::max();
Point2f closestPoint;
// 检查三条边
for (int i = 0; i < 3; ++i) {
const Point2f& p1 = uvCoords[i];
const Point2f& p2 = uvCoords[(i + 1) % 3];
Point2f proj = ProjectPointToLineSegment(texCoord, p1, p2);
float dist = cv::norm(texCoord - proj);
if (dist < minDist) {
minDist = dist;
closestPoint = proj;
}
}
// 如果距离小于阈值,进行边缘填充
const float edgeThreshold = 1.0f / textureSize; // 1个像素的阈值
if (minDist <= edgeThreshold * 2) { // 填充边缘周围2个像素
// 在最近点计算重心坐标
Point3f edgeBarycentric = BarycentricFromPoint(closestPoint, uvCoords[0], uvCoords[1], uvCoords[2]);
if (edgeBarycentric.x >= 0 && edgeBarycentric.x <= 1 &&
edgeBarycentric.y >= 0 && edgeBarycentric.y <= 1 &&
edgeBarycentric.z >= 0 && edgeBarycentric.z <= 1) {
const Vertex worldPoint =
vertices[face[0]] * edgeBarycentric.x +
vertices[face[1]] * edgeBarycentric.y +
vertices[face[2]] * edgeBarycentric.z;
Point2f imgPoint = ProjectPointWithAutoCorrection(sourceImage.camera, worldPoint, sourceImage);
if (ValidateProjection(worldPoint, sourceImage, imgPoint) &&
sourceImage.image.isInside(imgPoint) &&
sourceImage.camera.IsInFront(worldPoint)) {
Pixel8U sampledColor = SampleImageBilinear(sourceImage.image, imgPoint);
#ifdef _USE_OPENMP
#pragma omp critical
#endif
{
textureAtlas(y, x) = sampledColor;
}
}
}
} }
} }
}
}
}
// 5. 添加后处理填充函数
// 5. 应用后处理 FillTextureGaps(textureAtlas, scene.mesh.faceTexcoords, (FIndex)scene.mesh.faces.size(), faceLabels, views, textureSize, colEmpty);
// 6. 应用后处理
if (fSharpnessWeight > 0) { if (fSharpnessWeight > 0) {
// ApplySharpening(textureAtlas, fSharpnessWeight); // ApplySharpening(textureAtlas, fSharpnessWeight);
} }
// 6. 填充空白区域(使用邻近像素扩散)
// FillEmptyRegions(textureAtlas, colEmpty);
DEBUG_EXTRA("纹理图集生成完成: %u个面片, 纹理尺寸%dx%d", DEBUG_EXTRA("纹理图集生成完成: %u个面片, 纹理尺寸%dx%d",
scene.mesh.faces.size(), textureSize, textureSize); scene.mesh.faces.size(), textureSize, textureSize);
return textures; return textures;
} }
bool MeshTexture::ValidateProjection(const Vertex& worldPoint, bool MeshTexture::ValidateProjection(const Vertex& worldPoint,
const Image& sourceImage, Point2f imgPoint, const Image& sourceImage, Point2f imgPoint,
float maxReprojectionError) { float maxReprojectionError) {

Loading…
Cancel
Save