Replaced cv::Mat with std::vector in the undistortion as well.

Daniel
2025-02-27 13:06:08 +01:00
parent 63e00e9b13
commit 608f89d6b6
2 changed files with 224 additions and 149 deletions


@@ -210,7 +210,6 @@ std::vector<std::vector<std::array<float, 4>>> TriangulatorInternal::triangulate
stime = std::chrono::steady_clock::now();
// Undistort 2D poses
#pragma omp parallel for
for (size_t i = 0; i < cameras.size(); ++i)
{
undistort_poses(i_poses_2d[i], internal_cameras[i]);
@@ -604,6 +603,84 @@ void TriangulatorInternal::print_stats()
// =================================================================================================
void undistort_point_pinhole(std::array<float, 3> &p, const std::vector<float> &k)
{
// Use distortion coefficients: [k1, k2, p1, p2, k3]
// https://github.com/opencv/opencv/blob/4.x/modules/calib3d/src/undistort.dispatch.cpp#L432
float x0 = p[0];
float y0 = p[1];
float x = x0;
float y = y0;
// Iteratively refine the estimate for the undistorted point.
int max_iterations = 5;
for (int iter = 0; iter < max_iterations; ++iter)
{
float r2 = x * x + y * y;
double icdist = 1.0 / (1 + ((k[4] * r2 + k[1]) * r2 + k[0]) * r2);
if (icdist < 0)
{
x = x0;
y = y0;
break;
}
float deltaX = 2 * k[2] * x * y + k[3] * (r2 + 2 * x * x);
float deltaY = k[2] * (r2 + 2 * y * y) + 2 * k[3] * x * y;
x = (x0 - deltaX) * icdist;
y = (y0 - deltaY) * icdist;
}
p[0] = x;
p[1] = y;
}
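For reference (not part of the commit), a minimal round-trip check of the pinhole path: apply the forward Brown-Conrady model to a normalized point and confirm undistort_point_pinhole recovers it. The helper distort_pinhole and the sample coefficients below are illustrative assumptions.
// Sketch only: forward pinhole distortion on normalized coordinates,
// coefficients ordered [k1, k2, p1, p2, k3] as in the function above.
#include <array>
#include <cmath>
#include <cstdio>
#include <vector>
void undistort_point_pinhole(std::array<float, 3> &p, const std::vector<float> &k); // from this commit
static std::array<float, 3> distort_pinhole(std::array<float, 3> p, const std::vector<float> &k)
{
    float x = p[0];
    float y = p[1];
    float r2 = x * x + y * y;
    // Radial term: 1 + k1*r^2 + k2*r^4 + k3*r^6
    float radial = 1.0f + ((k[4] * r2 + k[1]) * r2 + k[0]) * r2;
    float xd = x * radial + 2.0f * k[2] * x * y + k[3] * (r2 + 2.0f * x * x);
    float yd = y * radial + k[2] * (r2 + 2.0f * y * y) + 2.0f * k[3] * x * y;
    return {xd, yd, p[2]};
}
int main()
{
    std::vector<float> k = {-0.2f, 0.05f, 0.001f, -0.001f, 0.01f}; // made-up coefficients
    std::array<float, 3> original = {0.3f, -0.2f, 1.0f};
    std::array<float, 3> p = distort_pinhole(original, k);
    undistort_point_pinhole(p, k);
    std::printf("round-trip error: %g %g\n", p[0] - original[0], p[1] - original[1]);
}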
void undistort_point_fisheye(std::array<float, 3> &p, const std::vector<float> &k)
{
// Use distortion coefficients: [k1, k2, k3, k4]
// https://github.com/opencv/opencv/blob/4.x/modules/calib3d/src/fisheye.cpp#L429
float theta_d = std::sqrt(p[0] * p[0] + p[1] * p[1]);
float pi_half = std::numbers::pi * 0.5;
theta_d = std::min(std::max(-pi_half, theta_d), pi_half);
if (theta_d < 1e-6)
{
return;
}
float scale = 0.0;
float theta = theta_d;
int max_iterations = 5;
for (int iter = 0; iter < max_iterations; ++iter)
{
float theta2 = theta * theta;
float theta4 = theta2 * theta2;
float theta6 = theta4 * theta2;
float theta8 = theta4 * theta4;
float k0_theta2 = k[0] * theta2;
float k1_theta4 = k[1] * theta4;
float k2_theta6 = k[2] * theta6;
float k3_theta8 = k[3] * theta8;
float theta_fix = (theta * (1 + k0_theta2 + k1_theta4 + k2_theta6 + k3_theta8) - theta_d) /
(1 + 3 * k0_theta2 + 5 * k1_theta4 + 7 * k2_theta6 + 9 * k3_theta8);
theta = theta - theta_fix;
if (std::fabs(theta_fix) < 1e-6)
{
break;
}
}
scale = std::tan(theta) / theta_d;
p[0] *= scale;
p[1] *= scale;
}
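Similarly, a sketch (not part of the commit) of the forward equidistant fisheye model with coefficients ordered [k1, k2, k3, k4], usable to sanity-check undistort_point_fisheye; distort_fisheye and the sample coefficients are illustrative assumptions.
// Sketch only: forward fisheye distortion on normalized coordinates.
#include <array>
#include <cmath>
#include <cstdio>
#include <vector>
void undistort_point_fisheye(std::array<float, 3> &p, const std::vector<float> &k); // from this commit
static std::array<float, 3> distort_fisheye(std::array<float, 3> p, const std::vector<float> &k)
{
    float r = std::sqrt(p[0] * p[0] + p[1] * p[1]);
    if (r < 1e-8f)
    {
        return p;
    }
    float theta = std::atan(r);
    float t2 = theta * theta;
    // theta_d = theta * (1 + k1*theta^2 + k2*theta^4 + k3*theta^6 + k4*theta^8)
    float theta_d = theta * (1.0f + (((k[3] * t2 + k[2]) * t2 + k[1]) * t2 + k[0]) * t2);
    float s = theta_d / r;
    return {p[0] * s, p[1] * s, p[2]};
}
int main()
{
    std::vector<float> k = {-0.05f, 0.01f, -0.002f, 0.0005f}; // made-up coefficients
    std::array<float, 3> original = {0.4f, 0.25f, 1.0f};
    std::array<float, 3> p = distort_fisheye(original, k);
    undistort_point_fisheye(p, k);
    std::printf("round-trip error: %g %g\n", p[0] - original[0], p[1] - original[1]);
}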
void TriangulatorInternal::undistort_poses(
std::vector<std::vector<std::array<float, 3>>> &poses_2d, CameraInternal &icam)
{
@@ -624,44 +701,42 @@ void TriangulatorInternal::undistort_poses(
icam.K, icam.DC, cv::Size(width, height), 1, cv::Size(width, height));
}
// Convert vectors to single mat
size_t num_persons = poses_2d.size();
size_t num_joints = poses_2d[0].size();
std::vector<int> dims = {(int)(num_persons * num_joints), 2};
cv::Mat points_mat = cv::Mat(dims, CV_32F);
float *mat_ptr = points_mat.ptr<float>(0);
for (size_t i = 0; i < num_persons; ++i)
{
for (size_t j = 0; j < num_joints; ++j)
{
for (size_t k = 0; k < 2; ++k)
{
mat_ptr[i * num_joints * 2 + j * 2 + k] = poses_2d[i][j][k];
}
}
}
float ifx_old = 1.0 / icam.cam.K[0][0];
float ify_old = 1.0 / icam.cam.K[1][1];
float cx_old = icam.cam.K[0][2];
float cy_old = icam.cam.K[1][2];
float fx_new = newK.at<float>(0, 0);
float fy_new = newK.at<float>(1, 1);
float cx_new = newK.at<float>(0, 2);
float cy_new = newK.at<float>(1, 2);
// Undistort all the points
points_mat = points_mat.reshape(2, static_cast<int>(points_mat.rows));
if (icam.cam.type == "fisheye")
{
cv::fisheye::undistortPoints(points_mat, points_mat, icam.K, icam.DC, cv::noArray(), newK);
}
else
{
cv::undistortPoints(points_mat, points_mat, icam.K, icam.DC, cv::noArray(), newK);
}
points_mat = points_mat.reshape(1, static_cast<int>(points_mat.rows));
// Overwrite the old coordinates with the undistorted ones
size_t num_persons = poses_2d.size();
size_t num_joints = poses_2d[0].size();
for (size_t i = 0; i < num_persons; ++i)
{
for (size_t j = 0; j < num_joints; ++j)
{
for (size_t k = 0; k < 2; ++k)
// Normalize the point using the original camera matrix
poses_2d[i][j][0] = (poses_2d[i][j][0] - cx_old) * ifx_old;
poses_2d[i][j][1] = (poses_2d[i][j][1] - cy_old) * ify_old;
// Undistort
// Using our own implementation is faster than calling OpenCV here, because it
// avoids the overhead of creating cv::Mat objects and skips unnecessary work for
// additional distortion parameters and identity rotations in this use case.
if (icam.cam.type == "fisheye")
{
poses_2d[i][j][k] = mat_ptr[i * num_joints * 2 + j * 2 + k];
undistort_point_fisheye(poses_2d[i][j], icam.cam.DC);
}
else
{
undistort_point_pinhole(poses_2d[i][j], icam.cam.DC);
}
// Map the undistorted normalized point to the new image coordinates
poses_2d[i][j][0] = (poses_2d[i][j][0] * fx_new) + cx_new;
poses_2d[i][j][1] = (poses_2d[i][j][1] * fy_new) + cy_new;
}
}
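Condensed, the new per-joint path in undistort_poses() is: normalize with the old intrinsics, undistort in normalized coordinates, then re-project with the new camera matrix. Below is a standalone sketch of that sequence for a single pinhole point; undistort_pixel and the plain 3x3 arrays standing in for icam.cam.K and newK are illustrative, not part of the commit (the fisheye branch would call undistort_point_fisheye instead).
// Sketch only: one pixel through the normalize -> undistort -> re-project pipeline.
#include <array>
#include <vector>
void undistort_point_pinhole(std::array<float, 3> &p, const std::vector<float> &k); // from this commit
std::array<float, 3> undistort_pixel(std::array<float, 3> p,
                                     const std::array<std::array<float, 3>, 3> &K_old,
                                     const std::array<std::array<float, 3>, 3> &K_new,
                                     const std::vector<float> &dist_coeffs)
{
    // 1) Normalize the pixel using the original camera matrix.
    p[0] = (p[0] - K_old[0][2]) / K_old[0][0];
    p[1] = (p[1] - K_old[1][2]) / K_old[1][1];
    // 2) Remove the lens distortion in normalized coordinates.
    undistort_point_pinhole(p, dist_coeffs);
    // 3) Map the undistorted point into the new image coordinates
    //    (e.g. a matrix from cv::getOptimalNewCameraMatrix).
    p[0] = p[0] * K_new[0][0] + K_new[0][2];
    p[1] = p[1] * K_new[1][1] + K_new[1][2];
    return p;
}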