Scale the 3D triangulation score with the 2D detection scores.

This commit is contained in:
Daniel
2025-03-20 17:00:35 +01:00
parent 6ed9326060
commit 4c214e1935
4 changed files with 1829 additions and 1824 deletions

File diff suppressed because it is too large Load Diff

View File

@ -1429,8 +1429,12 @@ std::pair<std::vector<std::array<float, 4>>, float> TriangulatorInternal::triang
{ {
if (mask[i]) if (mask[i])
{ {
float score = 0.5 * (score1[i] + score2[i]); float scoreT = 0.5 * (score1[i] + score2[i]);
pose3d[i][3] = score; float scoreP = 0.5 * (pose1[i][2] + pose2[i][2]);
// Since the triangulation score is less sensitive and generally higher,
// weight it stronger to balance the two scores.
pose3d[i][3] = 0.9 * scoreT + 0.1 * scoreP;
} }
} }

View File

@ -41,7 +41,7 @@ default_min_bbox_score = 0.3
# Describes how good two 2D poses need to match each other to create a valid triangulation # Describes how good two 2D poses need to match each other to create a valid triangulation
# If the quality of the 2D detections is poor, use a lower value # If the quality of the 2D detections is poor, use a lower value
default_min_match_score = 0.94 default_min_match_score = 0.91
# Describes the minimum number of camera pairs that need to detect the same person # Describes the minimum number of camera pairs that need to detect the same person
# If the number of cameras is high, and the views are not occluded, use a higher value # If the number of cameras is high, and the views are not occluded, use a higher value
@ -55,7 +55,7 @@ datasets = {
"human36m": { "human36m": {
"path": "/datasets/human36m/skelda/pose_test.json", "path": "/datasets/human36m/skelda/pose_test.json",
"take_interval": 5, "take_interval": 5,
"min_match_score": 0.95, "min_match_score": 0.92,
"min_group_size": 1, "min_group_size": 1,
"min_bbox_score": 0.4, "min_bbox_score": 0.4,
"min_bbox_area": 0.1 * 0.1, "min_bbox_area": 0.1 * 0.1,
@ -68,7 +68,7 @@ datasets = {
# "cams": ["00_03", "00_06", "00_12", "00_13", "00_23", "00_15", "00_10", "00_21", "00_09", "00_01"], # "cams": ["00_03", "00_06", "00_12", "00_13", "00_23", "00_15", "00_10", "00_21", "00_09", "00_01"],
# "cams": [], # "cams": [],
"take_interval": 3, "take_interval": 3,
"min_match_score": 0.95, "min_match_score": 0.92,
"use_scenes": ["160906_pizza1", "160422_haggling1", "160906_ian5"], "use_scenes": ["160906_pizza1", "160422_haggling1", "160906_ian5"],
"min_group_size": 1, "min_group_size": 1,
# "min_group_size": 4, # "min_group_size": 4,
@ -79,25 +79,25 @@ datasets = {
"path": "/datasets/mvor/skelda/all.json", "path": "/datasets/mvor/skelda/all.json",
"take_interval": 1, "take_interval": 1,
"with_depth": False, "with_depth": False,
"min_match_score": 0.85, "min_match_score": 0.80,
"min_bbox_score": 0.25, "min_bbox_score": 0.25,
}, },
"campus": { "campus": {
"path": "/datasets/campus/skelda/test.json", "path": "/datasets/campus/skelda/test.json",
"take_interval": 1, "take_interval": 1,
"min_match_score": 0.92, "min_match_score": 0.89,
"min_bbox_score": 0.5, "min_bbox_score": 0.5,
}, },
"shelf": { "shelf": {
"path": "/datasets/shelf/skelda/test.json", "path": "/datasets/shelf/skelda/test.json",
"take_interval": 1, "take_interval": 1,
"min_match_score": 0.95, "min_match_score": 0.92,
"min_group_size": 2, "min_group_size": 2,
}, },
"ikeaasm": { "ikeaasm": {
"path": "/datasets/ikeaasm/skelda/test.json", "path": "/datasets/ikeaasm/skelda/test.json",
"take_interval": 2, "take_interval": 2,
"min_match_score": 0.92, "min_match_score": 0.89,
"min_bbox_score": 0.20, "min_bbox_score": 0.20,
}, },
"chi3d": { "chi3d": {
@ -107,20 +107,21 @@ datasets = {
"tsinghua": { "tsinghua": {
"path": "/datasets/tsinghua/skelda/test.json", "path": "/datasets/tsinghua/skelda/test.json",
"take_interval": 3, "take_interval": 3,
"min_match_score": 0.95, "min_match_score": 0.92,
"min_group_size": 2, "min_group_size": 2,
}, },
"human36m_wb": { "human36m_wb": {
"path": "/datasets/human36m/skelda/wb/test.json", "path": "/datasets/human36m/skelda/wb/test.json",
"take_interval": 100, "take_interval": 100,
"min_bbox_score": 0.4, "min_bbox_score": 0.4,
"min_match_score": 0.93,
"batch_poses": False, "batch_poses": False,
}, },
"egohumans_tagging": { "egohumans_tagging": {
"path": "/datasets/egohumans/skelda/all.json", "path": "/datasets/egohumans/skelda/all.json",
"take_interval": 2, "take_interval": 2,
"subset": "tagging", "subset": "tagging",
"min_match_score": 0.92, "min_match_score": 0.89,
"min_group_size": 2, "min_group_size": 2,
"min_bbox_score": 0.2, "min_bbox_score": 0.2,
"min_bbox_area": 0.05 * 0.05, "min_bbox_area": 0.05 * 0.05,

View File

@ -19,7 +19,7 @@ whole_body = {
"hands": False, "hands": False,
} }
config = { config = {
"min_match_score": 0.94, "min_match_score": 0.91,
"min_group_size": 1, "min_group_size": 1,
"min_bbox_score": 0.3, "min_bbox_score": 0.3,
"min_bbox_area": 0.1 * 0.1, "min_bbox_area": 0.1 * 0.1,