Handle the mottled/patchy color issue

master · hesuicong · 3 weeks ago · commit 9e3c78c92e

libs/MVS/SceneTexture.cpp (461 changed lines)

@@ -4301,10 +4301,9 @@ bool MeshTexture::CreateVirtualFaces62(FaceDataViewArr& facesDatas, FaceDataView
} else {
IIndexArr selectedCams = SelectBestViews(centerFaceDatas, virtualFaceCenterFaceID, minCommonCameras, ratioAngleToQuality);
//*
// Get the normal of the center face (note the variable name is normalCenter, not centerNormal)
const Normal& normalCenter = scene.mesh.faceNormals[virtualFaceCenterFaceID];
//*
// Filter selectedCams: keep only views whose viewing angle to the face normal is within the limit (45 degrees below)
IIndexArr filteredCams; // stores the filtered view indices
for (IIndex idxView : selectedCams) {
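The filter below compares the angle between each camera's viewing direction and the center face normal against a fixed limit (the commented-out code further down shows the full computation from the camera rotation R). A minimal, self-contained sketch of that angle test follows; Vec3 and ViewingAngleDeg are illustrative names, not the OpenMVS types used in the commit.

#include <algorithm>
#include <cmath>

struct Vec3 { float x, y, z; };

static float Dot(const Vec3& a, const Vec3& b) {
    return a.x * b.x + a.y * b.y + a.z * b.z;
}

// Angle in degrees between a unit camera-forward vector and a unit face normal.
static float ViewingAngleDeg(const Vec3& cameraForward, const Vec3& faceNormal) {
    // Clamp the dot product so acos never receives a value slightly outside [-1, 1].
    const float c = std::max(-1.0f, std::min(1.0f, Dot(cameraForward, faceNormal)));
    return std::acos(c) * 180.0f / 3.14159265358979f;
}

// Usage: keep a view only when the angle is within the limit, e.g.
//   if (ViewingAngleDeg(cameraForward, faceNormal) <= 45.0f) filteredCams.push_back(idxView);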
@@ -4360,220 +4359,52 @@ bool MeshTexture::CreateVirtualFaces62(FaceDataViewArr& facesDatas, FaceDataView
if (angleDeg <= 45.0f)
{
filteredCams.push_back(idxView);
// float brightnessScore = CalculateBrightnessScore(imageData); // brightness scoring function
// float angleScore = 1.0f - (angleDeg / 45.0f);
// float qualityScore = 0.0f;
// const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID];
// for (const FaceData& fd : centerFaceDatas) {
// if (fd.idxView == idxView) {
// qualityScore = fd.quality;
// break;
// }
// }
// qualityScore = std::max(0.0f, std::min(1.0f, qualityScore));
// float overallScore = 0.5f * angleScore + 0.3f * brightnessScore + 0.2f * qualityScore;
// if (overallScore > 0.35f) {
// filteredCams.push_back(idxView);
// }
// filteredCams.push_back(idxView);
//*
float brightnessScore = CalculateBrightnessScore(imageData); // brightness scoring function
float angleScore = 1.0f - (angleDeg / 45.0f);
float qualityScore = 0.0f;
const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID];
for (const FaceData& fd : centerFaceDatas) {
if (fd.idxView == idxView) {
qualityScore = fd.quality;
break;
}
}
qualityScore = std::max(0.0f, std::min(1.0f, qualityScore));
float overallScore = 0.5f * angleScore + 0.3f * brightnessScore + 0.2f * qualityScore;
if (overallScore > 0.2f) {
filteredCams.push_back(idxView);
}
//*/
}
}
else
{
filteredCams.push_back(idxView);
// float brightnessScore = CalculateBrightnessScore(imageData); // brightness scoring function
// float angleScore = 1.0f - (angleDeg / 45.0f);
// float qualityScore = 0.0f;
// const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID];
// for (const FaceData& fd : centerFaceDatas) {
// if (fd.idxView == idxView) {
// qualityScore = fd.quality;
// break;
// }
// }
// qualityScore = std::max(0.0f, std::min(1.0f, qualityScore));
// float overallScore = 0.5f * angleScore + 0.3f * brightnessScore + 0.2f * qualityScore;
// if (overallScore > 0.35f) {
// filteredCams.push_back(idxView);
// }
}
}
}
//*/
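For intuition, a standalone sketch of the 0.5/0.3/0.2 weighted score applied above, with one worked example; OverallScore is an illustrative helper name, and the quality clamp mirrors the code.

#include <algorithm>

// Weighted view score: angle closeness, image brightness and per-face quality.
static float OverallScore(float angleDeg, float brightnessScore, float qualityScore,
                          float angleLimit = 45.0f) {
    const float angleScore = 1.0f - (angleDeg / angleLimit);
    const float quality = std::max(0.0f, std::min(1.0f, qualityScore));
    return 0.5f * angleScore + 0.3f * brightnessScore + 0.2f * quality;
}

// Worked example: angleDeg = 30, brightness = 0.5, quality = 0.4
//   angleScore = 1 - 30/45 = 0.333
//   score = 0.5*0.333 + 0.3*0.5 + 0.2*0.4 = 0.167 + 0.150 + 0.080 = 0.397 > 0.2, so the view is kept.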
/*
struct CameraInfo {
float brightness;
float angleDeg;
float qualityScore;
bool isEdge;
};
// Revised code: filter outliers using the statistical distribution of brightness
IIndexArr filteredCams; // stores the filtered view indices
// Step 1: collect the brightness values of all candidate cameras
std::vector<float> allBrightnessScores;
std::vector<std::pair<IIndex, CameraInfo>> cameraInfos; // stores camera IDs with their associated info
for (IIndex idxView : selectedCams) {
const Image& imageData = images[idxView];
// Compute the camera forward (viewing-direction) vector
const RMatrix& R = imageData.camera.R;
Point3f localForward(0.0f, 0.0f, -1.0f);
Point3f cameraForward;
cameraForward.x = R(0,0) * localForward.x + R(0,1) * localForward.y + R(0,2) * localForward.z;
cameraForward.y = R(1,0) * localForward.x + R(1,1) * localForward.y + R(1,2) * localForward.z;
cameraForward.z = R(2,0) * localForward.x + R(2,1) * localForward.y + R(2,2) * localForward.z;
// Normalize
float norm = std::sqrt(cameraForward.x * cameraForward.x +
cameraForward.y * cameraForward.y +
cameraForward.z * cameraForward.z);
if (norm > 0.0f) {
cameraForward.x /= norm;
cameraForward.y /= norm;
cameraForward.z /= norm;
} else {
cameraForward = Point3f(0, 0, -1);
}
// Compute the angle to the face normal
Point3f normalPoint(normalCenter.x, normalCenter.y, normalCenter.z);
float cosAngle = cameraForward.dot(normalPoint);
float angleDeg = std::acos(cosAngle) * 180.0f / M_PI;
// Compute the brightness score
float brightnessScore = CalculateBrightnessScore(imageData);
allBrightnessScores.push_back(brightnessScore);
// Get the quality score
float qualityScore = 0.0f;
const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID];
for (const FaceData& fd : centerFaceDatas) {
if (fd.idxView == idxView) {
qualityScore = fd.quality;
break;
}
}
qualityScore = std::max(0.0f, std::min(1.0f, qualityScore));
// Check whether this is an edge face
std::string strPath = imageData.name;
size_t lastSlash = strPath.find_last_of("/\\");
if (lastSlash == std::string::npos) lastSlash = 0;
else lastSlash++;
size_t lastDot = strPath.find_last_of('.');
if (lastDot == std::string::npos) lastDot = strPath.size();
std::string strName = strPath.substr(lastSlash, lastDot - lastSlash);
bool isEdge = false;
if (!scene.is_face_delete_edge(strName, virtualFaceCenterFaceID)) {
isEdge = scene.is_face_edge(strName, virtualFaceCenterFaceID);
}
// Store the camera info
cameraInfos.emplace_back(idxView, CameraInfo{
.brightness = brightnessScore,
.angleDeg = angleDeg,
.qualityScore = qualityScore,
.isEdge = isEdge
});
}
float medianBrightness = 0.0f;
// Step 2: compute statistics of the brightness distribution
if (!allBrightnessScores.empty()) {
// Method 1: median and MAD (median absolute deviation), more robust to outliers
std::vector<float> sortedBrightness = allBrightnessScores;
std::sort(sortedBrightness.begin(), sortedBrightness.end());
medianBrightness = sortedBrightness[sortedBrightness.size() / 2];
// Compute the MAD
std::vector<float> deviations;
for (float brightness : allBrightnessScores) {
deviations.push_back(std::abs(brightness - medianBrightness));
}
std::sort(deviations.begin(), deviations.end());
float mad = deviations[deviations.size() / 2];
// Use a multiple of the MAD as the threshold (2-3x MAD is a common cutoff; 2.5x is used here)
float lowerThreshold = medianBrightness - 2.5f * mad;
float upperThreshold = medianBrightness + 2.5f * mad;
// Method 2: interquartile range (IQR), as an alternative
if (sortedBrightness.size() >= 4) {
int q1_index = sortedBrightness.size() / 4;
int q3_index = 3 * sortedBrightness.size() / 4;
float q1 = sortedBrightness[q1_index];
float q3 = sortedBrightness[q3_index];
float iqr = q3 - q1;
// Apply the 1.5x IQR rule
float iqrLower = q1 - 1.5f * iqr;
float iqrUpper = q3 + 1.5f * iqr;
// The two methods can also be combined
lowerThreshold = std::max(lowerThreshold, iqrLower);
upperThreshold = std::min(upperThreshold, iqrUpper);
}
// Step 3: filter cameras based on these statistics
for (const auto& [idxView, info] : cameraInfos) {
// Check whether the brightness lies within the normal range
if (info.brightness < lowerThreshold || info.brightness > upperThreshold) {
// printf("过滤相机 %d: 亮度 %.3f 超出范围 [%.3f, %.3f]\n",
// idxView, info.brightness, lowerThreshold, upperThreshold);
continue;
}
// Use different parameters depending on whether the face is an edge face
float angleLimit = info.isEdge ? 45.0f : 60.0f;
float scoreThreshold = info.isEdge ? 0.4f : 0.3f;
if (info.angleDeg <= angleLimit) {
float angleScore = 1.0f - (info.angleDeg / angleLimit);
float overallScore = 0.5f * angleScore + 0.3f * info.brightness + 0.2f * info.qualityScore;
// filteredCams.push_back(idxView);
if (overallScore > scoreThreshold) {
//*
float brightnessScore = CalculateBrightnessScore(imageData); // brightness scoring function
float angleScore = 1.0f - (angleDeg / 45.0f);
float qualityScore = 0.0f;
const FaceDataArr& centerFaceDatas = facesDatas[virtualFaceCenterFaceID];
for (const FaceData& fd : centerFaceDatas) {
if (fd.idxView == idxView) {
qualityScore = fd.quality;
break;
}
}
qualityScore = std::max(0.0f, std::min(1.0f, qualityScore));
float overallScore = 0.5f * angleScore + 0.3f * brightnessScore + 0.2f * qualityScore;
if (overallScore > 0.2f) {
filteredCams.push_back(idxView);
}
//*/
}
}
}
// If not enough cameras satisfy the statistical criteria, relax the thresholds or fall back to the original method
if (filteredCams.size() < 3 && !cameraInfos.empty()) {
// Fall back to a simple method: pick the N cameras whose brightness is closest to the median
std::vector<std::pair<IIndex, float>> brightnessDifferences;
for (const auto& cameraInfoPair : cameraInfos) {
IIndex idxView = cameraInfoPair.first;
CameraInfo info = cameraInfoPair.second;
if (info.isEdge ? (info.angleDeg <= 45.0f) : (info.angleDeg <= 60.0f)) {
// Compute the difference between this brightness and the median
float brightnessDiff = std::abs(info.brightness - medianBrightness);
brightnessDifferences.emplace_back(idxView, brightnessDiff);
}
}
// Sort by brightness difference and keep the cameras with the smallest differences
std::sort(brightnessDifferences.begin(), brightnessDifferences.end(),
[](const auto& a, const auto& b) { return a.second < b.second; });
int neededCount = std::min(5, (int)brightnessDifferences.size());
for (int i = 0; i < neededCount; i++) {
filteredCams.push_back(brightnessDifferences[i].first);
}
}
//*/
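The commented-out alternative above derives lower/upper brightness limits from the median, the MAD (2.5x) and the 1.5x IQR rule, then drops cameras outside that band before scoring. A self-contained sketch of that thresholding on plain float brightness values; BrightnessBand is an illustrative name.

#include <algorithm>
#include <cassert>
#include <cmath>
#include <utility>
#include <vector>

// Lower/upper brightness limits: median +/- 2.5*MAD, tightened by the 1.5x IQR rule.
static std::pair<float, float> BrightnessBand(std::vector<float> values) {
    assert(!values.empty());
    std::sort(values.begin(), values.end());
    const float median = values[values.size() / 2];

    std::vector<float> dev;
    dev.reserve(values.size());
    for (float v : values)
        dev.push_back(std::abs(v - median));
    std::sort(dev.begin(), dev.end());
    const float mad = dev[dev.size() / 2];

    float lower = median - 2.5f * mad;
    float upper = median + 2.5f * mad;

    if (values.size() >= 4) {
        const float q1 = values[values.size() / 4];
        const float q3 = values[3 * values.size() / 4];
        const float iqr = q3 - q1;
        lower = std::max(lower, q1 - 1.5f * iqr);   // intersect the MAD band with the IQR band
        upper = std::min(upper, q3 + 1.5f * iqr);
    }
    return { lower, upper };
}

// Usage: drop a camera whenever its brightness falls outside [lower, upper]; if too few
// cameras survive, the alternative falls back to the N cameras closest to the median.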
// Make sure selectedCams is non-const so it can be assigned to;
// e.g. it should be declared as: IIndexArr selectedCams = ...; (not const IIndexArr)
if (filteredCams.empty()) {
@@ -4588,6 +4419,7 @@ bool MeshTexture::CreateVirtualFaces62(FaceDataViewArr& facesDatas, FaceDataView
selectedCams = filteredCams;
isVirtualFace[virtualFaceCenterFaceID] = true;
}
//*/
currentVirtualFaceQueue.AddTail(virtualFaceCenterFaceID);
queuedFaces.clear();
@@ -4641,6 +4473,7 @@ bool MeshTexture::CreateVirtualFaces62(FaceDataViewArr& facesDatas, FaceDataView
}
} while (!currentVirtualFaceQueue.IsEmpty());
// compute virtual face quality and create virtual face
for (IIndex idxView: selectedCams) {
FaceData& virtualFaceData = virtualFaceDatas.emplace_back();
virtualFaceData.quality = 0;
@@ -4648,30 +4481,218 @@ bool MeshTexture::CreateVirtualFaces62(FaceDataViewArr& facesDatas, FaceDataView
#if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA
virtualFaceData.color = Point3f::ZERO;
#endif
int invalidQuality = 0;
Color invalidColor = Point3f::ZERO;
unsigned processedFaces(0);
bool bInvalidFacesRelative = false;
int invalidCount = 0;
for (FIndex fid : virtualFace) {
const FaceDataArr& faceDatas = facesDatas[fid];
for (FaceData& faceData: faceDatas) {
if (faceData.idxView == idxView) {
virtualFaceData.quality += faceData.quality;
#if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA
virtualFaceData.color += faceData.color;
#endif
++processedFaces;
if (faceData.bInvalidFacesRelative)
++invalidCount;
break;
int nViewCount = 0;
if (faceData.idxView == idxView)
{
for (const FaceData& fd : faceDatas)
{
if ( faceData.bInvalidFacesRelative)
{
++nViewCount;
}
}
// if (faceData.bInvalidFacesRelative)
if (bHasInvalidView)
{
// invalidQuality += faceData.quality;
// #if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA
// invalidColor += faceData.color;
// #endif
++processedFaces;
}
else
{
// virtualFaceData.quality += faceData.quality;
#if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA
// virtualFaceData.color += faceData.color;
#endif
++processedFaces;
// break;
}
}
}
}
ASSERT(processedFaces > 0);
virtualFaceData.quality /= processedFaces;
#if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA
virtualFaceData.color /= processedFaces;
#endif
virtualFaceData.bInvalidFacesRelative = (invalidCount > processedFaces / 2);
float maxLuminance = 120.0f;
float minLuminance = 90.0f;
int validViewsSize = validViews.size();
// bHasInvalidView = true;
if (bHasInvalidView)
{
// Use robust statistics to compute the central color and luminance values
const Color medianColor = ComputeMedianColorAndQuality(sortedViews).color;
const float medianQuality = ComputeMedianColorAndQuality(sortedViews).quality;
const float medianLuminance = ComputeMedianLuminance(sortedViews);
// Compute the median absolute deviation (MAD) of color and luminance as the deviation threshold
const float colorMAD = ComputeColorMAD(sortedViews, medianColor);
const float luminanceMAD = ComputeLuminanceMAD(sortedViews, medianLuminance);
// Set dynamic thresholds based on the MAD (3x MAD is a common statistical outlier threshold)
const float maxColorDeviation = 0.01f * colorMAD;
const float maxLuminanceDeviation = 0.01f * luminanceMAD;
std::vector<int> validIndices;
for (int n = 0; n < sortedViews.size(); ++n) {
const Color& viewColor = sortedViews[n].second;
const float viewLuminance = MeshTexture::GetLuminance(viewColor);
const float colorDistance = cv::norm(viewColor - medianColor);
const float luminanceDistance = std::abs(viewLuminance - medianLuminance);
if (colorDistance <= maxColorDeviation &&
luminanceDistance <= maxLuminanceDeviation)
{
validIndices.push_back(n);
}
else
{
const FIndex currentFaceId = currentVirtualFaceQueue.GetHead();
const Normal& faceNormal = scene.mesh.faceNormals[currentFaceId];
const float cosFaceToCenter(ComputeAngleN(normalCenter.ptr(), faceNormal.ptr()));
bool bColorSimilarity = true;
// Check color similarity
const Color& centerColor = faceColors[virtualFaceCenterFaceID];
const Color& currentColor = faceColors[currentFaceId];
float colorDistance = cv::norm(centerColor - currentColor);
// printf("1colorDistance=%f\n", colorDistance);
if (colorDistance > thMaxColorDeviation) {
// printf("2colorDistance=%f\n", colorDistance);
bColorSimilarity = false;
}
// if ((cosFaceToCenter<dynamicCosTh) || !IsFaceVisible(facesDatas[currentFaceId], selectedCams))
if (cosFaceToCenter<dynamicCosTh)
{
if (nInvalidViewCount<=2)
validIndices.push_back(n);
else
{
// if ((colorDistance <= 350.0f))
validIndices.push_back(n);
}
}
else
{
if (nInvalidViewCount<=2)
validIndices.push_back(n);
else
{
// if (bColorSimilarity)
validIndices.push_back(n);
}
}
}
}
if (validIndices.empty()) {
for (int n = 0; n < sortedViews.size(); ++n) {
const Color& viewColor = sortedViews[n].second;
const float viewLuminance = MeshTexture::GetLuminance(viewColor);
const float colorDistance = cv::norm(viewColor - medianColor);
const float luminanceDistance = std::abs(viewLuminance - medianLuminance);
if (colorDistance <= maxColorDeviation)
{
// validIndices.push_back(n);
}
}
}
if (validIndices.empty()) {
for (int n = 0; n < sortedViews.size(); ++n) {
const Color& viewColor = sortedViews[n].second;
const float viewLuminance = MeshTexture::GetLuminance(viewColor);
const float colorDistance = cv::norm(viewColor - medianColor);
const float luminanceDistance = std::abs(viewLuminance - medianLuminance);
if (luminanceDistance <= maxLuminanceDeviation)
{
// validIndices.push_back(n);
}
}
}
{
ASSERT(processedFaces > 0);
// virtualFaceData.quality /= processedFaces;
#if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA
// virtualFaceData.color /= processedFaces;
#endif
virtualFaceData.quality = 0;
#if TEXOPT_FACEOUTLIER != TEXOPT_FACEOUTLIER_NA
virtualFaceData.color = Point3f::ZERO;
#endif
}
}
else
{
// Use robust statistics to compute the central color and luminance values
const Color medianColor = ComputeMedianColorAndQuality(sortedViews).color;
const float medianQuality = ComputeMedianColorAndQuality(sortedViews).quality;
const float medianLuminance = ComputeMedianLuminance(sortedViews);
// Compute the median absolute deviation (MAD) of color and luminance as the deviation threshold
const float colorMAD = ComputeColorMAD(sortedViews, medianColor);
const float luminanceMAD = ComputeLuminanceMAD(sortedViews, medianLuminance);
// Set dynamic thresholds based on the MAD (3x MAD is a common statistical outlier threshold)
const float maxColorDeviation = 0.01f * colorMAD;
// const float maxLuminanceDeviation = 0.01f * luminanceMAD;
const float maxLuminanceDeviation = 0.05f * luminanceMAD;
std::vector<int> validIndices;
for (int n = 0; n < sortedViews.size(); ++n) {
const Color& viewColor = sortedViews[n].second;
const float viewLuminance = MeshTexture::GetLuminance(viewColor);
const float colorDistance = cv::norm(viewColor - medianColor);
const float luminanceDistance = std::abs(viewLuminance - medianLuminance);
// if (colorDistance <= maxColorDeviation &&
// luminanceDistance <= maxLuminanceDeviation)
// if (luminanceDistance <= maxLuminanceDeviation)
{
validIndices.push_back(n);
}
}
if (validIndices.empty()) {
virtualFaceData.quality = medianQuality;
virtualFaceData.color = medianColor;
}
else {
// Recompute the average using the filtered views
float totalQuality2 = 0.0f;
Color totalColor2 = Color(0,0,0);
for (int idx : validIndices) {
totalQuality2 += validViews[idx].first;
totalColor2 += validViews[idx].second;
}
virtualFaceData.quality = totalQuality2 / validIndices.size();
virtualFaceData.color = totalColor2 / validIndices.size();
}
}
// virtualFaceData.bInvalidFacesRelative = (invalidCount > 1);
// virtualFaceData.bInvalidFacesRelative = (invalidCount > processedFaces * 2 / 3);
}
ASSERT(!virtualFaceDatas.empty());
}
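The last hunk keeps only the views whose color and luminance stay within a MAD-based band around robust central values, averages quality and color over the survivors, and falls back to the median values when nothing survives. A generic, self-contained sketch of that idea over plain structs follows; Color3, Luma, Median and RobustAverage are illustrative stand-ins (per-channel median, Rec.601 luminance weights), not the ComputeMedianColorAndQuality / ComputeColorMAD / ComputeLuminanceMAD / GetLuminance helpers referenced in the commit.

#include <algorithm>
#include <cassert>
#include <cmath>
#include <vector>

// Self-contained stand-ins; the commit operates on OpenMVS/OpenCV color types instead.
struct Color3 { float b, g, r; };

static float Luma(const Color3& c) {                 // Rec.601 weights (an assumption here)
    return 0.299f * c.r + 0.587f * c.g + 0.114f * c.b;
}
static float ColorDist(const Color3& a, const Color3& b) {
    const float db = a.b - b.b, dg = a.g - b.g, dr = a.r - b.r;
    return std::sqrt(db * db + dg * dg + dr * dr);
}
static float Median(std::vector<float> v) {
    assert(!v.empty());
    std::sort(v.begin(), v.end());
    return v[v.size() / 2];
}

struct ViewSample { float quality; Color3 color; };

// Keep only views close to the median color/luminance (threshold = factor * MAD),
// average quality and color over the survivors, and fall back to the medians otherwise.
static ViewSample RobustAverage(const std::vector<ViewSample>& views,
                                float colorFactor = 3.0f, float lumaFactor = 3.0f) {
    std::vector<float> lumas, bs, gs, rs, qualities;
    for (const ViewSample& v : views) {
        lumas.push_back(Luma(v.color));
        bs.push_back(v.color.b); gs.push_back(v.color.g); rs.push_back(v.color.r);
        qualities.push_back(v.quality);
    }
    const Color3 medColor{ Median(bs), Median(gs), Median(rs) };
    const float medLuma = Median(lumas);

    std::vector<float> colorDev, lumaDev;
    for (const ViewSample& v : views) {
        colorDev.push_back(ColorDist(v.color, medColor));
        lumaDev.push_back(std::abs(Luma(v.color) - medLuma));
    }
    const float maxColorDev = colorFactor * Median(colorDev);
    const float maxLumaDev  = lumaFactor * Median(lumaDev);

    float q = 0.f; Color3 sum{ 0.f, 0.f, 0.f }; int kept = 0;
    for (const ViewSample& v : views) {
        if (ColorDist(v.color, medColor) <= maxColorDev &&
            std::abs(Luma(v.color) - medLuma) <= maxLumaDev) {
            q += v.quality; sum.b += v.color.b; sum.g += v.color.g; sum.r += v.color.r; ++kept;
        }
    }
    if (kept == 0)                                    // nothing passed the filter: use the medians
        return { Median(qualities), medColor };
    return { q / kept, { sum.b / kept, sum.g / kept, sum.r / kept } };
}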
