diff --git a/libs/MVS/SceneTexture.cpp b/libs/MVS/SceneTexture.cpp
index 9921dcd..ec79778 100644
--- a/libs/MVS/SceneTexture.cpp
+++ b/libs/MVS/SceneTexture.cpp
@@ -458,7 +458,21 @@ public:
 	void CreateVirtualFaces3(const FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, unsigned minCommonCameras=2, float thMaxNormalDeviation=25.f) const;
 	void CreateVirtualFaces4(const FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, Mesh::FaceIdxArr& mapFaceToVirtualFace, unsigned minCommonCameras=2, float thMaxNormalDeviation=25.f);
 	void CreateVirtualFaces5(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, unsigned minCommonCameras=2, float thMaxNormalDeviation=25.f) const;
-	bool CreateVirtualFaces6(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, std::vector<bool>& isVirtualFace, unsigned minCommonCameras=2, float thMaxNormalDeviation=25.f) const;
+	bool CreateVirtualFaces6(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, std::vector<bool>& isVirtualFace, unsigned minCommonCameras=2, float thMaxNormalDeviation=25.f) const;
+	bool CreateVirtualFaces61(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, std::vector<bool>& isVirtualFace, unsigned minCommonCameras=2, float thMaxNormalDeviation=25.f);
+	bool CreateVirtualFaces62(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, std::vector<bool>& isVirtualFace, unsigned minCommonCameras=2, float thMaxNormalDeviation=25.f) const;
+	float CalculateBrightnessScore(const Image& imageData) const;
+	bool IsFaceVisibleRelaxed(const FaceDataArr& faceDatas, const IIndexArr& selectedCams, float visibleRatioThreshold) const;
+	bool CheckColorConsistency(const FaceDataArr& centerDatas, const FaceDataArr& candidateDatas) const;
+	void MergeSmallVirtualFaces(VirtualFaceIdxsArr& virtualFaces, FaceDataViewArr& virtualFacesDatas, const FaceDataViewArr& facesDatas) const;
+	float CalculateMergeScore(const FaceDataArr& vfData1, const FaceDataArr& vfData2) const;
+	float ComputeViewSimilarity(const FaceDataArr& views1, const FaceDataArr& views2) const;
+	float ComputeColorSimilarity(const Color& color1, const Color& color2) const;
+	float ComputeQualitySimilarity(float quality1, float quality2) const;
+	Color ComputeAverageColor(const FaceDataArr& virtualFaceData) const;
+	float ComputeAverageQuality(const FaceDataArr& virtualFaceData) const;
+	FaceDataArr MergeFaceData(const FaceDataArr& data1, const FaceDataArr& data2) const;
+	bool CreateVirtualFaces7(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, std::vector<bool>& isVirtualFace, unsigned minCommonCameras=2, float thMaxNormalDeviation=25.f) const;
 	IIndexArr SelectBestViews(const FaceDataArr& faceDatas, FIndex fid, unsigned minCommonCameras, float ratioAngleToQuality) const;
 	IIndexArr SelectBestView(const FaceDataArr& faceDatas, FIndex fid, unsigned minCommonCameras, float ratioAngleToQuality) const;
@@ -1804,8 +1818,6 @@ IIndexArr MeshTexture::SelectBestViews(const FaceDataArr& faceDatas, FIndex fid,
 	}
 	return (bestView != NO_ID) ?
IIndexArr{bestView} : IIndexArr(); } - - //*/ // compute scores based on the view quality and its angle to the face normal @@ -3626,7 +3638,8 @@ bool MeshTexture::CreateVirtualFaces6(FaceDataViewArr& facesDatas, FaceDataViewA // 基于MAD设置动态阈值(3倍MAD是统计学上常用的异常值阈值) const float maxColorDeviation = 0.01f * colorMAD; - const float maxLuminanceDeviation = 0.01f * luminanceMAD; + // const float maxLuminanceDeviation = 0.01f * luminanceMAD; + const float maxLuminanceDeviation = 0.05f * luminanceMAD; std::vector validIndices; for (int n = 0; n < sortedViews.size(); ++n) { @@ -3638,6 +3651,7 @@ bool MeshTexture::CreateVirtualFaces6(FaceDataViewArr& facesDatas, FaceDataViewA // if (colorDistance <= maxColorDeviation && // luminanceDistance <= maxLuminanceDeviation) + if (luminanceDistance <= maxLuminanceDeviation) { validIndices.push_back(n); } @@ -3673,7 +3687,472 @@ bool MeshTexture::CreateVirtualFaces6(FaceDataViewArr& facesDatas, FaceDataViewA return true; } -bool MeshTexture::CreateVirtualFaces7(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, std::vector& isVirtualFace, unsigned minCommonCameras, float thMaxNormalDeviation) const +bool MeshTexture::CreateVirtualFaces61(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, + std::vector& isVirtualFace, unsigned minCommonCameras, float thMaxNormalDeviation) +{ + // 1. 优化参数设定 + const float ratioAngleToQuality(0.67f); + // 放宽法线偏差阈值,鼓励在曲率平缓区域合并更多面片 + const float relaxedThMaxNormalDeviation = thMaxNormalDeviation * 1.5f; + const float cosMaxNormalDeviation(COS(FD2R(relaxedThMaxNormalDeviation))); + + // 2. 预计算面片属性,用于优化生长顺序 + std::vector faceFlatness(faces.size(), 0.0f); + // 计算每个面片的平坦度(通过其与相邻面片的法线差异) + FOREACH(f, faces) { + const Normal& normal = scene.mesh.faceNormals[f]; + float totalCosAngle = 0.0f; + int neighborCount = 0; + const Mesh::FaceFaces& neighbors = faceFaces[f]; + for (int i = 0; i < 3; ++i) { + if (neighbors[i] != NO_ID) { + const Normal& neighborNormal = scene.mesh.faceNormals[neighbors[i]]; + totalCosAngle += normal.dot(neighborNormal); + neighborCount++; + } + } + if (neighborCount > 0) { + faceFlatness[f] = totalCosAngle / neighborCount; // 值越大表示越平坦 + } + } + + Mesh::FaceIdxArr remainingFaces(faces.size()); + std::iota(remainingFaces.begin(), remainingFaces.end(), 0); + std::vector selectedFaces(faces.size(), false); + + // 3. 优化生长顺序:按平坦度和可用视图数量排序,优先选择"好"的种子点 + std::sort(remainingFaces.begin(), remainingFaces.end(), [&](FIndex a, FIndex b) { + // 优先选择更平坦、拥有更多视图的面片作为种子 + if (std::abs(faceFlatness[a] - faceFlatness[b]) > 0.1f) { + return faceFlatness[a] > faceFlatness[b]; // 平坦度降序 + } + return facesDatas[a].size() > facesDatas[b].size(); // 视图数降序 + }); + + cQueue currentVirtualFaceQueue; + std::unordered_set queuedFaces; + + do { + // 总是从剩余面片中最优的那个开始(由于已排序,第一个就是最优的) + const FIndex virtualFaceCenterFaceID = remainingFaces[0]; + ASSERT(currentVirtualFaceQueue.IsEmpty()); + const Normal& normalCenter = scene.mesh.faceNormals[virtualFaceCenterFaceID]; + const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID]; + + Mesh::FaceIdxArr virtualFace; + FaceDataArr virtualFaceDatas; + + if (centerFaceDatas.empty()) { + virtualFace.emplace_back(virtualFaceCenterFaceID); + selectedFaces[virtualFaceCenterFaceID] = true; + const auto posToErase = remainingFaces.FindFirst(virtualFaceCenterFaceID); + ASSERT(posToErase != Mesh::FaceIdxArr::NO_INDEX); + remainingFaces.RemoveAtMove(posToErase); + } else { + // 4. 
优化视图选择:使用更宽松的共视相机选择策略 + IIndexArr selectedCams = SelectBestViews(centerFaceDatas, virtualFaceCenterFaceID, + std::max(1u, minCommonCameras/2), // 降低最小共视要求 + ratioAngleToQuality); + + currentVirtualFaceQueue.AddTail(virtualFaceCenterFaceID); + queuedFaces.clear(); + + do { + const FIndex currentFaceId = currentVirtualFaceQueue.GetHead(); + currentVirtualFaceQueue.PopHead(); + + + // 安全检查1:确保面ID有效 + if (currentFaceId >= faces.size()) { + DEBUG_EXTRA("Warning: Invalid face ID %u in queue (max: %zu)", + currentFaceId, faces.size()); + continue; + } + + // 安全检查2:确保面未被处理过 + if (selectedFaces[currentFaceId]) { + continue; + } + // 5. 优化生长条件检查 + const Normal& faceNormal = scene.mesh.faceNormals[currentFaceId]; + const float cosFaceToCenter(ComputeAngleN(normalCenter.ptr(), faceNormal.ptr())); + + // 条件1: 法线一致性(使用放宽的阈值) + if (cosFaceToCenter < cosMaxNormalDeviation) + continue; + + // 条件2: 视图可见性(允许部分视图不可见,例如70%的选定相机可见即可) + if (!IsFaceVisibleRelaxed(facesDatas[currentFaceId], selectedCams, 0.7f)) // 新增宽松检查函数 + continue; + + // 条件3: 颜色一致性(新增检查,避免颜色差异过大的面片合并) + if (!CheckColorConsistency(facesDatas[virtualFaceCenterFaceID], + facesDatas[currentFaceId])) // 新增颜色检查函数 + continue; + + // 添加到虚拟面片 + const auto posToErase = remainingFaces.FindFirst(currentFaceId); + + + if (posToErase == Mesh::FaceIdxArr::NO_INDEX) { + DEBUG_EXTRA("Warning: Face %u not found in remainingFaces", currentFaceId); + // 继续处理,但跳过移除操作 + } else { + // 确保索引有效 + if (posToErase < remainingFaces.size()) { + remainingFaces.RemoveAtMove(posToErase); + } else { + DEBUG_EXTRA("Error: Invalid position %zu for face %u", + posToErase, currentFaceId); + } + } + + selectedFaces[currentFaceId] = true; + virtualFace.push_back(currentFaceId); + + // 6. 优化邻居添加策略:优先添加平坦的邻居 + const Mesh::FaceFaces& ffaces = faceFaces[currentFaceId]; + std::vector neighborsToAdd; + for (int i = 0; i < 3; ++i) { + const FIndex fIdx = ffaces[i]; + if (fIdx == NO_ID) continue; + if (!selectedFaces[fIdx] && queuedFaces.find(fIdx) == queuedFaces.end()) { + neighborsToAdd.push_back(fIdx); + } + } + // 按平坦度对邻居排序,优先生长到平坦区域 + std::sort(neighborsToAdd.begin(), neighborsToAdd.end(), [&](FIndex a, FIndex b) { + return faceFlatness[a] > faceFlatness[b]; + }); + for (FIndex nid : neighborsToAdd) { + currentVirtualFaceQueue.AddTail(nid); + queuedFaces.emplace(nid); + } + + } while (!currentVirtualFaceQueue.IsEmpty() && virtualFace.size() < 500); // 增加面片数量上限 + + // 计算虚拟面片数据(原有逻辑) + for (IIndex idxView: selectedCams) { + FaceData& virtualFaceData = virtualFaceDatas.emplace_back(); + virtualFaceData.quality = 0; + virtualFaceData.idxView = idxView; + #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA + virtualFaceData.color = Point3f::ZERO; + #endif + unsigned processedFaces(0); + for (FIndex fid : virtualFace) { + const FaceDataArr& faceDatas = facesDatas[fid]; + for (FaceData& faceData: faceDatas) { + if (faceData.idxView == idxView) { + virtualFaceData.quality += faceData.quality; + #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA + virtualFaceData.color += faceData.color; + #endif + ++processedFaces; + break; + } + } + } + if (processedFaces > 0) { + virtualFaceData.quality /= processedFaces; + #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA + virtualFaceData.color /= processedFaces; + #endif + } + } + ASSERT(!virtualFaceDatas.empty()); + } + virtualFacesDatas.emplace_back(std::move(virtualFaceDatas)); + virtualFaces.emplace_back(std::move(virtualFace)); + + // 更新剩余面片列表后重新排序,确保下次仍从最优种子开始 + if (remainingFaces.size() > 1) { + std::sort(remainingFaces.begin(), remainingFaces.end(), 
[&](FIndex a, FIndex b) { + if (std::abs(faceFlatness[a] - faceFlatness[b]) > 0.1f) { + return faceFlatness[a] > faceFlatness[b]; + } + return facesDatas[a].size() > facesDatas[b].size(); + }); + } + } while (!remainingFaces.empty()); + + // 7. 后处理:合并小虚拟面片 + MergeSmallVirtualFaces(virtualFaces, virtualFacesDatas, facesDatas); +} + +// 宽松的视图可见性检查(允许部分视图不可见) +bool MeshTexture::IsFaceVisibleRelaxed(const FaceDataArr& faceDatas, const IIndexArr& selectedCams, float visibleRatioThreshold) const +{ + if (selectedCams.empty()) return false; + + int visibleCount = 0; + for (IIndex camIdx : selectedCams) { + for (const FaceData& data : faceDatas) { + if (data.idxView == camIdx) { + visibleCount++; + break; + } + } + } + float visibleRatio = static_cast(visibleCount) / selectedCams.size(); + return visibleRatio >= visibleRatioThreshold; +} + +// 颜色一致性检查 +bool MeshTexture::CheckColorConsistency(const FaceDataArr& centerDatas, const FaceDataArr& candidateDatas) const +{ + // 简化实现:计算两个面片在共视相机下的平均颜色差异 + float totalColorDiff = 0.0f; + int commonViewCount = 0; + + for (const FaceData& centerData : centerDatas) { + for (const FaceData& candData : candidateDatas) { + if (centerData.idxView == candData.idxView) { + #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA + float diff = cv::norm(centerData.color - candData.color); + totalColorDiff += diff; + #endif + commonViewCount++; + break; + } + } + } + + if (commonViewCount == 0) return false; + + float avgColorDiff = totalColorDiff / commonViewCount; + return avgColorDiff < 30.0f; // 颜色差异阈值,可调整 +} + +// 合并小虚拟面片后处理 +void MeshTexture::MergeSmallVirtualFaces(VirtualFaceIdxsArr& virtualFaces, FaceDataViewArr& virtualFacesDatas, const FaceDataViewArr& facesDatas) const +{ + const size_t MIN_DESIRED_SIZE = 20; // 期望的最小面片大小 + + for (size_t i = 0; i < virtualFaces.size(); ++i) { + if (virtualFaces[i].size() >= MIN_DESIRED_SIZE) continue; + + // 寻找最合适的相邻虚拟面片进行合并 + int bestMergeTarget = -1; + float bestScore = -1.0f; + + for (size_t j = 0; j < virtualFaces.size(); ++j) { + if (i == j || virtualFaces[j].empty()) continue; + + // 计算合并得分(基于共视相机相似性) + float score = CalculateMergeScore(virtualFacesDatas[i], virtualFacesDatas[j]); + if (score > bestScore) { + bestScore = score; + bestMergeTarget = j; + } + } + + if (bestMergeTarget != -1 && bestScore > 0.5f) { + // 执行合并 + virtualFaces[bestMergeTarget].Join(virtualFaces[i]); + virtualFaces[i].clear(); // 标记为待删除 + virtualFacesDatas[bestMergeTarget] = MergeFaceData(virtualFacesDatas[bestMergeTarget], + virtualFacesDatas[i]); + } + } + + // 清理已被合并的空虚拟面片 + // ... (清理逻辑) +} + +float MeshTexture::CalculateMergeScore(const FaceDataArr& vfData1, const FaceDataArr& vfData2) const +{ + if (vfData1.empty() || vfData2.empty()) { + return 0.0f; // 空数据无法合并 + } + + float totalScore = 0.0f; + int validComparisons = 0; + + // 1. 视图共现分析:检查共享的视图 + std::unordered_set commonViews; + std::unordered_set allViews; + + // 收集所有视图 + for (const FaceData& data : vfData1) { + allViews.insert(data.idxView); + } + for (const FaceData& data : vfData2) { + if (allViews.find(data.idxView) != allViews.end()) { + commonViews.insert(data.idxView); + } + } + + // 计算视图重叠度得分 + float viewOverlapScore = 0.0f; + if (!allViews.empty()) { + viewOverlapScore = static_cast(commonViews.size()) / allViews.size(); + } + totalScore += viewOverlapScore * 0.4f; // 视图重叠权重 40% + validComparisons++; + + // 2. 
颜色一致性分析 + Color avgColor1 = ComputeAverageColor(vfData1); + Color avgColor2 = ComputeAverageColor(vfData2); + float colorSimilarity = ComputeColorSimilarity(avgColor1, avgColor2); + totalScore += colorSimilarity * 0.3f; // 颜色一致性权重 30% + validComparisons++; + + // 3. 质量一致性分析 + float avgQuality1 = ComputeAverageQuality(vfData1); + float avgQuality2 = ComputeAverageQuality(vfData2); + float qualitySimilarity = ComputeQualitySimilarity(avgQuality1, avgQuality2); + totalScore += qualitySimilarity * 0.3f; // 质量一致性权重 30% + validComparisons++; + + // 4. 有效性检查:确保有足够的共享视图 + if (commonViews.size() < 2) { + totalScore *= 0.5f; // 严重惩罚视图重叠不足的情况 + } + + // 5. 数据完整性检查:如果某个面片数据质量很差,降低得分 + if (avgQuality1 < 0.1f || avgQuality2 < 0.1f) { + totalScore *= 0.7f; + } + + return std::max(0.0f, std::min(1.0f, totalScore)); +} +float MeshTexture::ComputeViewSimilarity(const FaceDataArr& views1, const FaceDataArr& views2) const +{ + if (views1.empty() || views2.empty()) { + return 0.0f; + } + + // 创建视图ID到质量的映射 + std::unordered_map viewQualityMap1; + std::unordered_map viewQualityMap2; + + for (const FaceData& data : views1) { + viewQualityMap1[data.idxView] = data.quality; + } + for (const FaceData& data : views2) { + viewQualityMap2[data.idxView] = data.quality; + } + + // 计算共享视图的相似度 + float sharedViewScore = 0.0f; + int sharedCount = 0; + + for (const auto& [viewId, quality1] : viewQualityMap1) { + auto it = viewQualityMap2.find(viewId); + if (it != viewQualityMap2.end()) { + float quality2 = it->second; + float qualitySim = 1.0f - std::abs(quality1 - quality2) / std::max(quality1, quality2); + sharedViewScore += qualitySim; + sharedCount++; + } + } + + if (sharedCount == 0) { + return 0.0f; + } + + float overlapRatio = static_cast(sharedCount) / + std::max(viewQualityMap1.size(), viewQualityMap2.size()); + + return (sharedViewScore / sharedCount) * overlapRatio; +} + +float MeshTexture::ComputeColorSimilarity(const Color& color1, const Color& color2) const +{ + // 计算欧氏距离并转换为相似度 (0-1) + float distance = cv::norm(color1 - color2); + float maxDistance = 255.0f * std::sqrt(3.0f); // 最大可能距离 + + // 使用指数衰减函数将距离转换为相似度 + float similarity = std::exp(-distance / (0.3f * maxDistance)); + return std::max(0.0f, std::min(1.0f, similarity)); +} + +float MeshTexture::ComputeQualitySimilarity(float quality1, float quality2) const +{ + if (quality1 <= 0 || quality2 <= 0) { + return 0.0f; + } + + float ratio = std::min(quality1, quality2) / std::max(quality1, quality2); + return ratio; +} + +MeshTexture::Color MeshTexture::ComputeAverageColor(const FaceDataArr& virtualFaceData) const +{ + Color sumColor = Color::ZERO; + float totalWeight = 0.0f; + int sampleCount = 0; + + // 对每个面片采样有限数量的视图进行计算,避免过度计算 + const int MAX_SAMPLES_PER_FACE = 3; + + for (const auto& data : virtualFaceData) { + int samplesTaken = 0; + + if (samplesTaken >= MAX_SAMPLES_PER_FACE) break; + + if (!data.bInvalidFacesRelative && data.quality > 0.1f) { + sumColor += data.color * data.quality; + totalWeight += data.quality; + samplesTaken++; + } + + sampleCount += samplesTaken; + } + + if (totalWeight > 0 && sampleCount > 0) { + return sumColor / totalWeight; + } + + // 回退策略:简单平均 + Color simpleSum = Color::ZERO; + int count = 0; + for (const FaceData& data : virtualFaceData) { + if (!data.bInvalidFacesRelative) { + simpleSum += data.color; + count++; + } + } + + return count > 0 ? 
simpleSum / count : Color::ZERO; +} +float MeshTexture::ComputeAverageQuality(const FaceDataArr& virtualFaceData) const +{ + float totalQuality = 0.0f; + int count = 0; + + // 使用迭代器遍历外层列表 + for (auto it = virtualFaceData.begin(); it != virtualFaceData.end(); ++it) { + const FaceData& data = *it; // 直接解引用迭代器得到 FaceData + if (!data.bInvalidFacesRelative) { + totalQuality += data.quality; + count++; + } + } + + return count > 0 ? totalQuality / static_cast(count) : 0.0f; +} +MeshTexture::FaceDataArr MeshTexture::MergeFaceData(const FaceDataArr& data1, const FaceDataArr& data2) const +{ + FaceDataArr mergedData; + + // 预留足够的空间以避免多次重新分配 + mergedData.reserve(data1.size() + data2.size()); + + for (auto it = data1.begin(); it != data1.end(); ++it) { + mergedData.emplace_back(*it); // 或者使用 InsertBefore 等其他方法 + } + for (auto it = data2.begin(); it != data2.end(); ++it) { + mergedData.emplace_back(*it); // 或者使用 InsertBefore 等其他方法 + } + + return mergedData; +} + + +bool MeshTexture::CreateVirtualFaces62(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, std::vector& isVirtualFace, unsigned minCommonCameras, float thMaxNormalDeviation) const { if (meshCurvatures.empty()) { ComputeFaceCurvatures(); @@ -3788,7 +4267,7 @@ bool MeshTexture::CreateVirtualFaces7(FaceDataViewArr& facesDatas, FaceDataViewA } else { IIndexArr selectedCams = SelectBestViews(centerFaceDatas, virtualFaceCenterFaceID, minCommonCameras, ratioAngleToQuality); - /* + //* // 获取中心面片的法线 (注意变量名是 normalCenter, 不是 centerNormal) const Normal& normalCenter = scene.mesh.faceNormals[virtualFaceCenterFaceID]; // 过滤selectedCams:只保留夹角小于30度的视图 @@ -3846,12 +4325,48 @@ bool MeshTexture::CreateVirtualFaces7(FaceDataViewArr& facesDatas, FaceDataViewA if (angleDeg <= 45.0f) { - filteredCams.push_back(idxView); + // filteredCams.push_back(idxView); + //* + float brightnessScore = CalculateBrightnessScore(imageData); // 亮度评分函数 + float angleScore = 1.0f - (angleDeg / 45.0f); + float qualityScore = 0.0f; + const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID]; + for (const FaceData& fd : centerFaceDatas) { + if (fd.idxView == idxView) { + qualityScore = fd.quality; + break; + } + } + qualityScore = std::max(0.0f, std::min(1.0f, qualityScore)); + float overallScore = 0.5f * angleScore + 0.3f * brightnessScore + 0.2f * qualityScore; + if (overallScore > 0.15f) { + filteredCams.push_back(idxView); + } + //*/ } + } else { - filteredCams.push_back(idxView); + // filteredCams.push_back(idxView); + + //* + float brightnessScore = CalculateBrightnessScore(imageData); // 亮度评分函数 + float angleScore = 1.0f - (angleDeg / 45.0f); + float qualityScore = 0.0f; + const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID]; + for (const FaceData& fd : centerFaceDatas) { + if (fd.idxView == idxView) { + qualityScore = fd.quality; + break; + } + } + qualityScore = std::max(0.0f, std::min(1.0f, qualityScore)); + float overallScore = 0.5f * angleScore + 0.3f * brightnessScore + 0.2f * qualityScore; + if (overallScore > 0.15f) { + filteredCams.push_back(idxView); + } + //*/ } } } @@ -3903,21 +4418,6 @@ bool MeshTexture::CreateVirtualFaces7(FaceDataViewArr& facesDatas, FaceDataViewA } } - /* - // #ifdef TEXOPT_USE_OPENMP - // #pragma omp critical - // #endif - // std::lock_guard lock(*scene.mesh.invalidFaces.mtx); - // if (scene.mesh.invalidFaces.data.find(currentFaceId) != scene.mesh.invalidFaces.data.end()) { - // continue; // 跳过无效面 - // } - - // 检查是否被所有选定相机有效看到 - if 
(!IsFaceVisibleAndValid(facesDatas[currentFaceId], selectedCams)) { - continue; - } - //*/ - // remove it from remaining faces and add it to the virtual face { const auto posToErase = remainingFaces.FindFirst(currentFaceId); @@ -3939,18 +4439,6 @@ bool MeshTexture::CreateVirtualFaces7(FaceDataViewArr& facesDatas, FaceDataViewA } } while (!currentVirtualFaceQueue.IsEmpty()); - /* - if (selectedCams.empty()) { - const Color medianColor = ComputeMedianColorAndQuality(sortedViews).color; - const float medianQuality = ComputeMedianColorAndQuality(sortedViews).quality; - - FaceData& virtualFaceData = virtualFaceDatas.emplace_back(); - virtualFaceData.color = medianColor; - virtualFaceData.quality = medianQuality; - - } - */ - // compute virtual face quality and create virtual face for (IIndex idxView: selectedCams) { FaceData& virtualFaceData = virtualFaceDatas.emplace_back(); @@ -3967,10 +4455,565 @@ bool MeshTexture::CreateVirtualFaces7(FaceDataViewArr& facesDatas, FaceDataViewA for (FIndex fid : virtualFace) { const FaceDataArr& faceDatas = facesDatas[fid]; for (FaceData& faceData: faceDatas) { - /* - // if (faceData.idxView == idxView) { - if (faceData.idxView == idxView && !faceData.bInvalidFacesRelative) { - virtualFaceData.quality += faceData.quality; + + int nViewCount = 0; + if (faceData.idxView == idxView) + { + for (const FaceData& fd : faceDatas) + { + if ( faceData.bInvalidFacesRelative) + { + ++nViewCount; + } + } + // if (faceData.bInvalidFacesRelative) + if (bHasInvalidView) + { + // invalidQuality += faceData.quality; + // #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA + // invalidColor += faceData.color; + // #endif + + ++processedFaces; + } + else + { + // virtualFaceData.quality += faceData.quality; + #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA + // virtualFaceData.color += faceData.color; + #endif + ++processedFaces; + // break; + } + } + } + } + + float maxLuminance = 120.0f; + float minLuminance = 90.0f; + int validViewsSize = validViews.size(); + // bHasInvalidView = true; + if (bHasInvalidView) + { + // 使用鲁棒的统计方法计算颜色和亮度的中心值 + const Color medianColor = ComputeMedianColorAndQuality(sortedViews).color; + const float medianQuality = ComputeMedianColorAndQuality(sortedViews).quality; + const float medianLuminance = ComputeMedianLuminance(sortedViews); + + // 计算颜色和亮度的绝对中位差(MAD)作为偏差阈值 + const float colorMAD = ComputeColorMAD(sortedViews, medianColor); + const float luminanceMAD = ComputeLuminanceMAD(sortedViews, medianLuminance); + + // 基于MAD设置动态阈值(3倍MAD是统计学上常用的异常值阈值) + const float maxColorDeviation = 0.01f * colorMAD; + const float maxLuminanceDeviation = 0.01f * luminanceMAD; + + std::vector validIndices; + for (int n = 0; n < sortedViews.size(); ++n) { + const Color& viewColor = sortedViews[n].second; + const float viewLuminance = MeshTexture::GetLuminance(viewColor); + + const float colorDistance = cv::norm(viewColor - medianColor); + const float luminanceDistance = std::abs(viewLuminance - medianLuminance); + + if (colorDistance <= maxColorDeviation && + luminanceDistance <= maxLuminanceDeviation) + { + validIndices.push_back(n); + } + else + { + const FIndex currentFaceId = currentVirtualFaceQueue.GetHead(); + const Normal& faceNormal = scene.mesh.faceNormals[currentFaceId]; + const float cosFaceToCenter(ComputeAngleN(normalCenter.ptr(), faceNormal.ptr())); + + bool bColorSimilarity = true; + // Check color similarity + const Color& centerColor = faceColors[virtualFaceCenterFaceID]; + const Color& currentColor = faceColors[currentFaceId]; + + float colorDistance = 
cv::norm(centerColor - currentColor); + // printf("1colorDistance=%f\n", colorDistance); + if (colorDistance > thMaxColorDeviation) { + // printf("2colorDistance=%f\n", colorDistance); + bColorSimilarity = false; + } + + // if ((cosFaceToCenter 0); + // virtualFaceData.quality /= processedFaces; + #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA + // virtualFaceData.color /= processedFaces; + #endif + + virtualFaceData.quality = 0; + #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA + virtualFaceData.color = Point3f::ZERO; + #endif + } + } + else + { + // 使用鲁棒的统计方法计算颜色和亮度的中心值 + const Color medianColor = ComputeMedianColorAndQuality(sortedViews).color; + const float medianQuality = ComputeMedianColorAndQuality(sortedViews).quality; + const float medianLuminance = ComputeMedianLuminance(sortedViews); + + // 计算颜色和亮度的绝对中位差(MAD)作为偏差阈值 + const float colorMAD = ComputeColorMAD(sortedViews, medianColor); + const float luminanceMAD = ComputeLuminanceMAD(sortedViews, medianLuminance); + + // 基于MAD设置动态阈值(3倍MAD是统计学上常用的异常值阈值) + const float maxColorDeviation = 0.01f * colorMAD; + // const float maxLuminanceDeviation = 0.01f * luminanceMAD; + const float maxLuminanceDeviation = 0.05f * luminanceMAD; + + std::vector validIndices; + for (int n = 0; n < sortedViews.size(); ++n) { + const Color& viewColor = sortedViews[n].second; + const float viewLuminance = MeshTexture::GetLuminance(viewColor); + + const float colorDistance = cv::norm(viewColor - medianColor); + const float luminanceDistance = std::abs(viewLuminance - medianLuminance); + + // if (colorDistance <= maxColorDeviation && + // luminanceDistance <= maxLuminanceDeviation) + if (luminanceDistance <= maxLuminanceDeviation) + { + validIndices.push_back(n); + } + } + + if (validIndices.empty()) { + + virtualFaceData.quality = medianQuality; + virtualFaceData.color = medianColor; + } + else { + // 使用过滤后的视图重新计算平均值 + float totalQuality2 = 0.0f; + Color totalColor2 = Color(0,0,0); + for (int idx : validIndices) { + totalQuality2 += validViews[idx].first; + totalColor2 += validViews[idx].second; + } + virtualFaceData.quality = totalQuality2 / validIndices.size(); + virtualFaceData.color = totalColor2 / validIndices.size(); + } + } + + // virtualFaceData.bInvalidFacesRelative = (invalidCount > 1); + // virtualFaceData.bInvalidFacesRelative = (invalidCount > processedFaces * 2 / 3); + } + ASSERT(!virtualFaceDatas.empty()); + } + virtualFacesDatas.emplace_back(std::move(virtualFaceDatas)); + virtualFaces.emplace_back(std::move(virtualFace)); + } while (!remainingFaces.empty()); + + return true; +} + +/** + * 计算图像亮度评分,专为纹理生成优化 + * @param imageData 输入图像(支持单通道和三通道) + * @return 亮度评分(0-1之间,越高表示亮度质量越好) + */ +float MeshTexture::CalculateBrightnessScore(const Image& imageData) const +{ + if (imageData.image.empty()) { + return 0.5f; // 默认中等评分 + } + + const cv::Mat& image = imageData.image; // 步骤1: 计算动态范围(整合自 ComputeDynamicRangeWithHist) + cv::Mat grayImage; + if (image.channels() > 1) { + cv::cvtColor(image, grayImage, cv::COLOR_BGR2GRAY); // 转为灰度图 + } else { + grayImage = image.clone(); + } + + // 计算直方图 + int histSize = 256; // 0-255的亮度值 + float range[] = {0, 256}; + const float* histRange = {range}; + cv::Mat hist; + cv::calcHist(&grayImage, 1, 0, cv::Mat(), hist, 1, &histSize, &histRange); + + // 动态范围:找到最小和最大非零亮度值 + float minVal = 255, maxVal = 0; + for (int i = 0; i < histSize; i++) { + if (hist.at(i) > 0) { + minVal = std::min(minVal, static_cast(i)); + maxVal = std::max(maxVal, static_cast(i)); + } + } + float dynamicRange = (maxVal - minVal) / 255.0f; // 
归一化到[0,1] + + // 步骤2: 计算最终评分(整合自 CalculateFinalScore) + // 示例评分逻辑:结合动态范围和平均亮度 + cv::Scalar meanVal = cv::mean(grayImage); + float meanBrightness = meanVal[0] / 255.0f; // 归一化平均亮度 + + // 评分公式:动态范围和平均亮度的加权组合(权重可调整) + float finalScore = 0.6f * dynamicRange + 0.4f * meanBrightness; + + return finalScore; // 返回亮度分数 +} + +bool MeshTexture::CreateVirtualFaces7(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, std::vector& isVirtualFace, unsigned minCommonCameras, float thMaxNormalDeviation) const +{ + if (meshCurvatures.empty()) { + ComputeFaceCurvatures(); + } + + float thMaxColorDeviation = 130.0f; + + const float ratioAngleToQuality(0.67f); + const float cosMaxNormalDeviation(COS(FD2R(thMaxNormalDeviation))); + Mesh::FaceIdxArr remainingFaces(faces.size()); + std::iota(remainingFaces.begin(), remainingFaces.end(), 0); + std::vector selectedFaces(faces.size(), false); + cQueue currentVirtualFaceQueue; + std::unordered_set queuedFaces; + + // Precompute average color for each face + Colors faceColors; // 创建一个空列表 + faceColors.reserve(faces.size()); // 预分配空间(如果cList有reserve方法且您关心性能) + for (size_t i = 0; i < faces.size(); ++i) { + faceColors.push_back(Color::ZERO); // 逐个添加元素 + } + for (FIndex idxFace = 0; idxFace < faces.size(); ++idxFace) { + const FaceDataArr& faceDatas = facesDatas[idxFace]; + if (faceDatas.empty()) continue; + Color sumColor = Color::ZERO; + for (const FaceData& fd : faceDatas) { + sumColor += fd.color; + } + faceColors[idxFace] = sumColor / faceDatas.size(); + } + + do { + const FIndex startPos = RAND() % remainingFaces.size(); + const FIndex virtualFaceCenterFaceID = remainingFaces[startPos]; + + // 动态法线阈值 + const float centerCurvature = meshCurvatures[virtualFaceCenterFaceID]; + const float dynamicThreshold = (centerCurvature < 0.2f) ? 15.0f : 8.0f; // 曲率<0.2为平坦区域 + const float dynamicCosTh = COS(FD2R(dynamicThreshold)); + + ASSERT(currentVirtualFaceQueue.IsEmpty()); + const Normal& normalCenter = scene.mesh.faceNormals[virtualFaceCenterFaceID]; + const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID]; + + // 检查中心面片是否包含无效视图 + bool bHasInvalidView = false; + int nInvalidViewCount = 0; + int nTotalViewCount = 0; + for (const FaceData& faceData : centerFaceDatas) { + if (faceData.bInvalidFacesRelative) { + bHasInvalidView = true; + ++nInvalidViewCount; + // break; + } + ++nTotalViewCount; + } + + std::vector> sortedViews; + std::vector> sortedLuminViews; + std::vector> validViews; + sortedViews.reserve(centerFaceDatas.size()); + for (const FaceData& fd : centerFaceDatas) { + + if (fd.bInvalidFacesRelative) + { + // invalidView = fd.idxView; + // invalidQuality = fd.quality; + sortedViews.emplace_back(fd.quality, fd.color); + sortedLuminViews.emplace_back(MeshTexture::GetLuminance(fd.color), fd.color); + } + else + { + sortedViews.emplace_back(fd.quality, fd.color); + sortedLuminViews.emplace_back(MeshTexture::GetLuminance(fd.color), fd.color); + validViews.emplace_back(fd.quality, fd.color); + } + } + std::sort(sortedViews.begin(), sortedViews.end(), + [](const auto& a, const auto& b) { return a.first > b.first; }); + std::sort(validViews.begin(), validViews.end(), + [](const auto& a, const auto& b) { return a.first > b.first; }); + + int nSize = sortedViews.size(); + // int nSize = (sortedViews.size()>1) ? 
1 : sortedViews.size(); + // 计算初始平均值 + float totalQuality = 0.0f; + Color totalColor(0,0,0); + for (int n = 0; n < nSize; ++n) { + totalQuality += sortedViews[n].first; + totalColor += sortedViews[n].second; + } + const float avgQuality = totalQuality / nSize; + const Color avgColor = totalColor / nSize; + + float totalLuminance = MeshTexture::GetLuminance(totalColor); + float avgLuminance = totalLuminance / nSize; + std::sort(sortedLuminViews.begin(), sortedLuminViews.end(), + [avgLuminance](const auto& a, const auto& b) { + float luminDistA = cv::norm(avgLuminance - a.first); + float luminDistB = cv::norm(avgLuminance - b.first); + return luminDistA < luminDistB; }); + + // select the common cameras + Mesh::FaceIdxArr virtualFace; + FaceDataArr virtualFaceDatas; + if (centerFaceDatas.empty()) { + virtualFace.emplace_back(virtualFaceCenterFaceID); + selectedFaces[virtualFaceCenterFaceID] = true; + const auto posToErase = remainingFaces.FindFirst(virtualFaceCenterFaceID); + ASSERT(posToErase != Mesh::FaceIdxArr::NO_INDEX); + remainingFaces.RemoveAtMove(posToErase); + } else { + IIndexArr selectedCams = SelectBestViews(centerFaceDatas, virtualFaceCenterFaceID, minCommonCameras, ratioAngleToQuality); + + /* + // 获取中心面片的法线 (注意变量名是 normalCenter, 不是 centerNormal) + const Normal& normalCenter = scene.mesh.faceNormals[virtualFaceCenterFaceID]; + // 过滤selectedCams:只保留夹角小于30度的视图 + IIndexArr filteredCams; // 用于存储过滤后的视图索引 + for (IIndex idxView : selectedCams) { + const Image& imageData = images[idxView]; + // 计算相机在世界坐标系中的朝向向量(相机镜面法线) + const RMatrix& R = imageData.camera.R; // 请根据 R 的实际类型调整,可能是 Matrix3x3f 或其他 + // 相机局部坐标系中的向前向量 (0,0,-1) + Point3f localForward(0.0f, 0.0f, -1.0f); + // 手动计算矩阵乘法:cameraForward = R * localForward + Point3f cameraForward; + cameraForward.x = R(0,0) * localForward.x + R(0,1) * localForward.y + R(0,2) * localForward.z; + cameraForward.y = R(1,0) * localForward.x + R(1,1) * localForward.y + R(1,2) * localForward.z; + cameraForward.z = R(2,0) * localForward.x + R(2,1) * localForward.y + R(2,2) * localForward.z; + + // 手动归一化 cameraForward(因为 Point3f 可能没有 normalize() 成员函数) + float norm = std::sqrt(cameraForward.x * cameraForward.x + + cameraForward.y * cameraForward.y + + cameraForward.z * cameraForward.z); + if (norm > 0.0f) { + cameraForward.x /= norm; + cameraForward.y /= norm; + cameraForward.z /= norm; + } else { + // 处理零向量的情况,赋予默认值 + cameraForward = Point3f(0, 0, -1); + } + + // 计算夹角余弦值 - 使用已声明的 normalCenter + // 假设 Normal 类型可以隐式转换为 Point3f,或进行显式转换 + Point3f normalPoint(normalCenter.x, normalCenter.y, normalCenter.z); // 显式转换示例 + float cosAngle = cameraForward.dot(normalPoint); // 使用正确的变量名 normalPoint(由 normalCenter 转换而来) + float angleDeg = std::acos(cosAngle) * 180.0f / M_PI; // 将弧度转换为角度 + + std::string strPath = imageData.name; + size_t lastSlash = strPath.find_last_of("/\\"); + if (lastSlash == std::string::npos) lastSlash = 0; // 若无分隔符,从头开始 + else lastSlash++; // 跳过分隔符 + + // 查找扩展名分隔符 '.' 
的位置 + size_t lastDot = strPath.find_last_of('.'); + if (lastDot == std::string::npos) lastDot = strPath.size(); // 若无扩展名,截到末尾 + + // 截取文件名(不含路径和扩展名) + std::string strName = strPath.substr(lastSlash, lastDot - lastSlash); + + // printf("CreateVirtualFace %s, %d\n", strName.c_str(), virtualFaceCenterFaceID); + + if (!scene.is_face_delete_edge(strName, virtualFaceCenterFaceID)) + { + if (scene.is_face_edge(strName, virtualFaceCenterFaceID)) + { + // printf("CreateVirtualFace %s, %d, %f\n", strName.c_str(), virtualFaceCenterFaceID, angleLimit); + + if (angleDeg <= 45.0f) + { + filteredCams.push_back(idxView); + } + } + else + { + filteredCams.push_back(idxView); + } + } + } + + // 确保 selectedCams 是非 const 的,才能对其进行赋值 + // 例如,其声明应为:IIndexArr selectedCams = ...; (不能是 const IIndexArr) + if (filteredCams.empty()) { + // 处理所有视图都被过滤的情况... + // DEBUG_EXTRA("Warning: All views filtered for virtual face due to angle condition."); + + // selectedCams = SelectBestView(centerFaceDatas, virtualFaceCenterFaceID, minCommonCameras, ratioAngleToQuality); + selectedCams = filteredCams; + isVirtualFace[virtualFaceCenterFaceID] = false; + + } else { + selectedCams = filteredCams; + isVirtualFace[virtualFaceCenterFaceID] = true; + } + //*/ + + currentVirtualFaceQueue.AddTail(virtualFaceCenterFaceID); + queuedFaces.clear(); + do { + const FIndex currentFaceId = currentVirtualFaceQueue.GetHead(); + currentVirtualFaceQueue.PopHead(); + // check for condition to add in current virtual face + // normal angle smaller than thMaxNormalDeviation degrees + const Normal& faceNormal = scene.mesh.faceNormals[currentFaceId]; + const float cosFaceToCenter(ComputeAngleN(normalCenter.ptr(), faceNormal.ptr())); + // if (cosFaceToCenter < cosMaxNormalDeviation) + // continue; + if (cosFaceToCenter < dynamicCosTh) // 使用动态阈值 + continue; + // check if current face is seen by all cameras in selectedCams + ASSERT(!selectedCams.empty()); + if (!IsFaceVisible(facesDatas[currentFaceId], selectedCams)) + continue; + + // Check color similarity + const Color& centerColor = faceColors[virtualFaceCenterFaceID]; + const Color& currentColor = faceColors[currentFaceId]; + // if (cv::norm(centerColor) > 1e-5 && cv::norm(currentColor) > 1e-5) + { + float colorDistance = cv::norm(centerColor - currentColor); + // printf("1colorDistance=%f\n", colorDistance); + if (colorDistance > thMaxColorDeviation) { + // printf("2colorDistance=%f\n", colorDistance); + // continue; // Skip if color difference is too large + } + } + + /* + // #ifdef TEXOPT_USE_OPENMP + // #pragma omp critical + // #endif + // std::lock_guard lock(*scene.mesh.invalidFaces.mtx); + // if (scene.mesh.invalidFaces.data.find(currentFaceId) != scene.mesh.invalidFaces.data.end()) { + // continue; // 跳过无效面 + // } + + // 检查是否被所有选定相机有效看到 + if (!IsFaceVisibleAndValid(facesDatas[currentFaceId], selectedCams)) { + continue; + } + //*/ + + // remove it from remaining faces and add it to the virtual face + { + const auto posToErase = remainingFaces.FindFirst(currentFaceId); + ASSERT(posToErase != Mesh::FaceIdxArr::NO_INDEX); + remainingFaces.RemoveAtMove(posToErase); + selectedFaces[currentFaceId] = true; + virtualFace.push_back(currentFaceId); + } + // add all new neighbors to the queue + const Mesh::FaceFaces& ffaces = faceFaces[currentFaceId]; + for (int i = 0; i < 3; ++i) { + const FIndex fIdx = ffaces[i]; + if (fIdx == NO_ID) + continue; + if (!selectedFaces[fIdx] && queuedFaces.find(fIdx) == queuedFaces.end()) { + currentVirtualFaceQueue.AddTail(fIdx); + queuedFaces.emplace(fIdx); + } + } + } 
while (!currentVirtualFaceQueue.IsEmpty()); + + /* + if (selectedCams.empty()) { + const Color medianColor = ComputeMedianColorAndQuality(sortedViews).color; + const float medianQuality = ComputeMedianColorAndQuality(sortedViews).quality; + + FaceData& virtualFaceData = virtualFaceDatas.emplace_back(); + virtualFaceData.color = medianColor; + virtualFaceData.quality = medianQuality; + + } + */ + + // compute virtual face quality and create virtual face + for (IIndex idxView: selectedCams) { + FaceData& virtualFaceData = virtualFaceDatas.emplace_back(); + virtualFaceData.quality = 0; + virtualFaceData.idxView = idxView; + #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA + virtualFaceData.color = Point3f::ZERO; + #endif + int invalidQuality = 0; + Color invalidColor = Point3f::ZERO; + unsigned processedFaces(0); + bool bInvalidFacesRelative = false; + int invalidCount = 0; + for (FIndex fid : virtualFace) { + const FaceDataArr& faceDatas = facesDatas[fid]; + for (FaceData& faceData: faceDatas) { + /* + // if (faceData.idxView == idxView) { + if (faceData.idxView == idxView && !faceData.bInvalidFacesRelative) { + virtualFaceData.quality += faceData.quality; #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA virtualFaceData.color += faceData.color; #endif @@ -4496,208 +5539,6 @@ bool MeshTexture::CreateVirtualFaces7(FaceDataViewArr& facesDatas, FaceDataViewA return true; } -/* -void MeshTexture::CreateVirtualFaces6(FaceDataViewArr& facesDatas, FaceDataViewArr& virtualFacesDatas, VirtualFaceIdxsArr& virtualFaces, unsigned minCommonCameras, float thMaxNormalDeviation) const -{ - float thMaxColorDeviation = 0.000001f; - if (meshCurvatures.empty()) { - ComputeFaceCurvatures(); - } - - const float ratioAngleToQuality(0.67f); - const float cosMaxNormalDeviation(COS(FD2R(thMaxNormalDeviation))); - Mesh::FaceIdxArr remainingFaces(faces.size()); - std::iota(remainingFaces.begin(), remainingFaces.end(), 0); - std::vector selectedFaces(faces.size(), false); - cQueue currentVirtualFaceQueue; - std::unordered_set queuedFaces; - - // Precompute average color for each face - Colors faceColors; // 创建一个空列表 - faceColors.reserve(faces.size()); // 预分配空间(如果cList有reserve方法且您关心性能) - for (size_t i = 0; i < faces.size(); ++i) { - faceColors.push_back(Color::ZERO); // 逐个添加元素 - } - for (FIndex idxFace = 0; idxFace < faces.size(); ++idxFace) { - const FaceDataArr& faceDatas = facesDatas[idxFace]; - if (faceDatas.empty()) continue; - Color sumColor = Color::ZERO; - for (const FaceData& fd : faceDatas) { - sumColor += fd.color; - } - faceColors[idxFace] = sumColor / faceDatas.size(); - } - - do { - const FIndex startPos = RAND() % remainingFaces.size(); - const FIndex virtualFaceCenterFaceID = remainingFaces[startPos]; - - // 动态法线阈值 - const float centerCurvature = meshCurvatures[virtualFaceCenterFaceID]; - const float dynamicThreshold = (centerCurvature < 0.2f) ? 
15.0f : 8.0f; // 曲率<0.2为平坦区域 - const float dynamicCosTh = COS(FD2R(dynamicThreshold)); - - ASSERT(currentVirtualFaceQueue.IsEmpty()); - const Normal& normalCenter = scene.mesh.faceNormals[virtualFaceCenterFaceID]; - const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID]; - - // 检查中心面片是否包含无效视图 - bool bHasInvalidView = false; - int nInvalidViewCount = 0; - int nTotalViewCount = 0; - for (const FaceData& faceData : centerFaceDatas) { - if (faceData.bInvalidFacesRelative) { - bHasInvalidView = true; - ++nInvalidViewCount; - } - ++nTotalViewCount; - } - - std::vector> sortedViews; - std::vector> sortedLuminViews; - std::vector> validViews; - sortedViews.reserve(centerFaceDatas.size()); - for (const FaceData& fd : centerFaceDatas) { - if (fd.bInvalidFacesRelative) { - sortedViews.emplace_back(fd.quality, fd.color); - sortedLuminViews.emplace_back(MeshTexture::GetLuminance(fd.color), fd.color); - } else { - sortedViews.emplace_back(fd.quality, fd.color); - sortedLuminViews.emplace_back(MeshTexture::GetLuminance(fd.color), fd.color); - validViews.emplace_back(fd.quality, fd.color); - } - } - std::sort(sortedViews.begin(), sortedViews.end(), - [](const auto& a, const auto& b) { return a.first > b.first; }); - std::sort(validViews.begin(), validViews.end(), - [](const auto& a, const auto& b) { return a.first > b.first; }); - - int nSize = sortedViews.size(); - // 计算初始平均值 - float totalQuality = 0.0f; - Color totalColor(0,0,0); - for (int n = 0; n < nSize; ++n) { - totalQuality += sortedViews[n].first; - totalColor += sortedViews[n].second; - } - const float avgQuality = totalQuality / nSize; - const Color avgColor = totalColor / nSize; - - float totalLuminance = MeshTexture::GetLuminance(totalColor); - float avgLuminance = totalLuminance / nSize; - std::sort(sortedLuminViews.begin(), sortedLuminViews.end(), - [avgLuminance](const auto& a, const auto& b) { - float luminDistA = cv::norm(avgLuminance - a.first); - float luminDistB = cv::norm(avgLuminance - b.first); - return luminDistA < luminDistB; }); - - // select the common cameras - Mesh::FaceIdxArr virtualFace; - FaceDataArr virtualFaceDatas; - if (centerFaceDatas.empty()) { - virtualFace.emplace_back(virtualFaceCenterFaceID); - selectedFaces[virtualFaceCenterFaceID] = true; - const auto posToErase = remainingFaces.FindFirst(virtualFaceCenterFaceID); - ASSERT(posToErase != Mesh::FaceIdxArr::NO_INDEX); - remainingFaces.RemoveAtMove(posToErase); - } else { - const IIndexArr selectedCams = SelectBestViews(centerFaceDatas, virtualFaceCenterFaceID, minCommonCameras, ratioAngleToQuality); - currentVirtualFaceQueue.AddTail(virtualFaceCenterFaceID); - queuedFaces.clear(); - do { - const FIndex currentFaceId = currentVirtualFaceQueue.GetHead(); - currentVirtualFaceQueue.PopHead(); - // check for condition to add in current virtual face - // normal angle smaller than thMaxNormalDeviation degrees - const Normal& faceNormal = scene.mesh.faceNormals[currentFaceId]; - const float cosFaceToCenter(ComputeAngleN(normalCenter.ptr(), faceNormal.ptr())); - if (cosFaceToCenter < dynamicCosTh) // 使用动态阈值 - continue; - // check if current face is seen by all cameras in selectedCams - ASSERT(!selectedCams.empty()); - if (!IsFaceVisible(facesDatas[currentFaceId], selectedCams)) - continue; - - // Check color similarity - const Color& centerColor = faceColors[virtualFaceCenterFaceID]; - const Color& currentColor = faceColors[currentFaceId]; - if (cv::norm(centerColor) > 1e-5 && cv::norm(currentColor) > 1e-5) { - float colorDistance = cv::norm(centerColor - 
currentColor); - if (colorDistance > thMaxColorDeviation) - { - continue; // Skip if color difference is too large - } - } - - // remove it from remaining faces and add it to the virtual face - { - const auto posToErase = remainingFaces.FindFirst(currentFaceId); - ASSERT(posToErase != Mesh::FaceIdxArr::NO_INDEX); - remainingFaces.RemoveAtMove(posToErase); - selectedFaces[currentFaceId] = true; - virtualFace.push_back(currentFaceId); - } - // add all new neighbors to the queue - const Mesh::FaceFaces& ffaces = faceFaces[currentFaceId]; - for (int i = 0; i < 3; ++i) { - const FIndex fIdx = ffaces[i]; - if (fIdx == NO_ID) - continue; - if (!selectedFaces[fIdx] && queuedFaces.find(fIdx) == queuedFaces.end()) { - currentVirtualFaceQueue.AddTail(fIdx); - queuedFaces.emplace(fIdx); - } - } - } while (!currentVirtualFaceQueue.IsEmpty()); - // compute virtual face quality and create virtual face - for (IIndex idxView: selectedCams) { - FaceData& virtualFaceData = virtualFaceDatas.emplace_back(); - virtualFaceData.quality = 0; - virtualFaceData.idxView = idxView; - #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA - virtualFaceData.color = Point3f::ZERO; - #endif - int invalidQuality = 0; - Color invalidColor = Point3f::ZERO; - unsigned processedFaces(0); - bool bInvalidFacesRelative = false; - int invalidCount = 0; - for (FIndex fid : virtualFace) { - const FaceDataArr& faceDatas = facesDatas[fid]; - for (FaceData& faceData: faceDatas) { - // 填充: 只处理当前视图的数据,累加质量和颜色 - if (faceData.idxView == idxView) { - virtualFaceData.quality += faceData.quality; - #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA - virtualFaceData.color += faceData.color; - #endif - processedFaces++; - if (faceData.bInvalidFacesRelative) { - invalidCount++; - } - break; // 每个面片每个视图只应有一个数据,找到后退出内层循环 - } - } - } - // 填充: 后处理,计算平均值和设置无效标志 - if (processedFaces > 0) { - virtualFaceData.quality /= processedFaces; - #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA - virtualFaceData.color /= processedFaces; - #endif - virtualFaceData.bInvalidFacesRelative = (invalidCount > processedFaces / 2); // 如果超过一半面片无效,则标记虚拟面无效 - } else { - // 如果没有找到任何数据,移除刚添加的virtualFaceData - virtualFaceDatas.pop_back(); - } - } - ASSERT(!virtualFaceDatas.empty()); - } - virtualFacesDatas.emplace_back(std::move(virtualFaceDatas)); - virtualFaces.emplace_back(std::move(virtualFace)); - } while (!remainingFaces.empty()); -} -*/ #if TEXOPT_FACEOUTLIER == TEXOPT_FACEOUTLIER_MEDIAN // decrease the quality of / remove all views in which the face's projection @@ -6129,7 +6970,9 @@ bool MeshTexture::FaceViewSelection3( unsigned minCommonCameras, float fOutlierT // CreateVirtualFaces(facesDatas, virtualFacesDatas, virtualFaces, minCommonCameras); // CreateVirtualFaces3(facesDatas, virtualFacesDatas, virtualFaces, minCommonCameras); // CreateVirtualFaces4(facesDatas, virtualFacesDatas, virtualFaces, mapFaceToVirtualFace, minCommonCameras); - CreateVirtualFaces6(facesDatas, virtualFacesDatas, virtualFaces, isVirtualFace, minCommonCameras); + // CreateVirtualFaces6(facesDatas, virtualFacesDatas, virtualFaces, isVirtualFace, minCommonCameras); + // CreateVirtualFaces61(facesDatas, virtualFacesDatas, virtualFaces, isVirtualFace, minCommonCameras); + CreateVirtualFaces62(facesDatas, virtualFacesDatas, virtualFaces, isVirtualFace, minCommonCameras); TD_TIMER_STARTD(); // CreateVirtualFaces7(facesDatas, virtualFacesDatas, virtualFaces, isVirtualFace, minCommonCameras); DEBUG_EXTRA("CreateVirtualFaces7 completed: %s", TD_TIMER_GET_FMT().c_str()); @@ -6145,7 +6988,7 @@ bool 
MeshTexture::FaceViewSelection3( unsigned minCommonCameras, float fOutlierT
 		}
 	}
-	// mark the faces textured with the per-face algorithm
+	// mark the faces textured with the per-face algorithm
 	FOREACH(f, faces) {
 		if (isVirtualFace[f]) {
 			perFaceFaces.push_back(f);
diff --git a/libs/MVS/mask_face_occlusion.py b/libs/MVS/mask_face_occlusion.py
index 9bfba79..e5905fa 100755
--- a/libs/MVS/mask_face_occlusion.py
+++ b/libs/MVS/mask_face_occlusion.py
@@ -2174,6 +2174,8 @@ class ModelProcessor:
             result3: dictionary of delete-edge faces, mapping each image name to its list of delete-edge faces
            base_path: base file path
         """
+
+        os.makedirs(base_path, exist_ok = True)
         print(f"save_occlusion_data {base_path}, {len(result1)}, {len(result2)}, {len(result3)}")
@@ -2199,7 +2201,7 @@ class ModelProcessor:
         # save visible_faces_map
         try:
-            with open(base_path + "/_visible_faces_map.txt", "w", encoding='utf-8') as map_file:
+            with open(base_path + "_visible_faces_map.txt", "w", encoding='utf-8') as map_file:
                 for image_name, face_set in visible_faces_map.items():
                     # write the image name and all face IDs, separated by spaces
                     line = image_name + " " + " ".join(str(face) for face in face_set) + "\n"
                     map_file.write(line)
@@ -2209,7 +2211,7 @@ class ModelProcessor:
         # save face_visible_relative
         try:
-            with open(base_path + "/_face_visible_relative.txt", "w", encoding='utf-8') as relative_file:
+            with open(base_path + "_face_visible_relative.txt", "w", encoding='utf-8') as relative_file:
                 for face in face_visible_relative:
                     relative_file.write(str(face) + "\n")
         except IOError as e:
@@ -2217,7 +2219,7 @@ class ModelProcessor:
         # save edge_faces_map
         try:
-            with open(base_path + "/_edge_faces_map.txt", "w", encoding='utf-8') as map_file2:
+            with open(base_path + "_edge_faces_map.txt", "w", encoding='utf-8') as map_file2:
                 for image_name, face_set in edge_faces_map.items():
                     line = image_name + " " + " ".join(str(face) for face in face_set) + "\n"
                     map_file2.write(line)
@@ -2226,7 +2228,7 @@ class ModelProcessor:
         # save delete_edge_faces_map
         try:
-            with open(base_path + "/_delete_edge_faces_map.txt", "w", encoding='utf-8') as map_file3:
+            with open(base_path + "_delete_edge_faces_map.txt", "w", encoding='utf-8') as map_file3:
                 for image_name, face_set in delete_edge_faces_map.items():
                     line = image_name + " " + " ".join(str(face) for face in face_set) + "\n"
                     map_file3.write(line)
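
Note on the median/MAD view filtering touched by this patch (the luminance tolerance is relaxed from 0.01f to 0.05f of the luminance MAD, and the color-distance test is commented out): the sketch below is a minimal, self-contained illustration of that outlier-rejection scheme, assuming plain std C++ containers and a Rec.601 luma in place of the project's FaceData, GetLuminance, ComputeMedianLuminance and ComputeLuminanceMAD helpers; the names ColorF and FilterViewsByLuminance are illustrative only, and the multiplier k stands in for the 0.01f/0.05f factors used in the patch (the comment in the patch cites 3x MAD as the usual statistical cut-off).

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

struct ColorF { float b, g, r; };            // BGR, matching OpenCV channel order

static float Luminance(const ColorF& c) {
    // Rec.601 luma weights; the project's GetLuminance may use different coefficients.
    return 0.114f * c.b + 0.587f * c.g + 0.299f * c.r;
}

static float Median(std::vector<float> v) {
    std::nth_element(v.begin(), v.begin() + v.size() / 2, v.end());
    return v[v.size() / 2];
}

// Keep only the views whose luminance lies within k * MAD of the median luminance,
// mirroring the relaxed check `luminanceDistance <= maxLuminanceDeviation`.
std::vector<size_t> FilterViewsByLuminance(const std::vector<ColorF>& viewColors, float k) {
    if (viewColors.empty())
        return {};
    std::vector<float> lum;
    lum.reserve(viewColors.size());
    for (const ColorF& c : viewColors)
        lum.push_back(Luminance(c));
    const float med = Median(lum);
    std::vector<float> absDev;
    absDev.reserve(lum.size());
    for (float l : lum)
        absDev.push_back(std::abs(l - med));
    const float mad = Median(absDev);            // median absolute deviation
    std::vector<size_t> valid;
    for (size_t i = 0; i < lum.size(); ++i)
        if (std::abs(lum[i] - med) <= k * mad)
            valid.push_back(i);
    return valid;
}

int main() {
    const std::vector<ColorF> views = {
        {100, 110, 120}, {102, 108, 118}, {98, 112, 122}, {30, 20, 10} // last view is an outlier
    };
    for (size_t i : FilterViewsByLuminance(views, 3.f))
        std::printf("kept view %zu\n", i);
    return 0;
}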
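
Note on CalculateMergeScore: the weighting it applies (0.4 view overlap, 0.3 color similarity via exponential decay, 0.3 quality ratio, then x0.5 when fewer than two shared views and x0.7 when either average quality is below 0.1) is summarized by the stand-alone sketch below. The std::set inputs and the precomputed colorDistance argument are simplifications of the FaceDataArr / cv::norm code in the patch, and MergeScore is an illustrative name, not a codebase symbol.

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <iterator>
#include <set>

float MergeScore(const std::set<unsigned>& views1, const std::set<unsigned>& views2,
                 float colorDistance,      // e.g. cv::norm(avgColor1 - avgColor2) in the patch
                 float quality1, float quality2) {
    // View overlap: |shared views| / |views of the first patch| (the patch divides by
    // `allViews`, which is populated only from vfData1).
    std::set<unsigned> common;
    std::set_intersection(views1.begin(), views1.end(), views2.begin(), views2.end(),
                          std::inserter(common, common.begin()));
    const float overlap = views1.empty() ? 0.f : float(common.size()) / float(views1.size());
    // Color similarity via exponential decay, as in ComputeColorSimilarity.
    const float maxDistance = 255.f * std::sqrt(3.f);
    const float colorSim = std::exp(-colorDistance / (0.3f * maxDistance));
    // Quality similarity as a min/max ratio, as in ComputeQualitySimilarity.
    const float qualitySim = (quality1 <= 0.f || quality2 <= 0.f)
        ? 0.f : std::min(quality1, quality2) / std::max(quality1, quality2);
    float score = 0.4f * overlap + 0.3f * colorSim + 0.3f * qualitySim;
    if (common.size() < 2)                      // penalize weak co-visibility
        score *= 0.5f;
    if (quality1 < 0.1f || quality2 < 0.1f)     // penalize very low-quality data
        score *= 0.7f;
    return std::max(0.f, std::min(1.f, score));
}

int main() {
    std::printf("merge score = %.3f\n",
                MergeScore({0, 1, 2, 3}, {1, 2, 3, 5}, 12.f, 0.8f, 0.6f));
    return 0;
}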
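
Note on CalculateBrightnessScore: the patch scores a view as 0.6 x normalized dynamic range (min/max of the non-empty grayscale histogram bins) + 0.4 x normalized mean brightness, with 0.5 returned for empty images. A minimal sketch under the assumption of a plain grayscale byte buffer instead of cv::Mat/cv::calcHist; BrightnessScore is an illustrative name only.

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

float BrightnessScore(const std::vector<uint8_t>& gray) {
    if (gray.empty())
        return 0.5f;                             // neutral score for missing data, as in the patch
    uint8_t minVal = 255, maxVal = 0;
    double sum = 0.0;
    for (uint8_t v : gray) {
        minVal = std::min(minVal, v);
        maxVal = std::max(maxVal, v);
        sum += v;
    }
    const float dynamicRange = (maxVal - minVal) / 255.f;           // normalized to [0,1]
    const float meanBrightness = float(sum / gray.size()) / 255.f;  // normalized mean brightness
    return 0.6f * dynamicRange + 0.4f * meanBrightness;             // weights taken from the patch
}

int main() {
    std::printf("brightness score = %.3f\n", BrightnessScore({10, 40, 90, 160, 220}));
    return 0;
}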