2026-02-04 03:03:16 +00:00
commit 53a443d120
9 changed files with 2353 additions and 0 deletions

src/main.cpp · 211 lines · new executable file

@@ -0,0 +1,211 @@
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2025, STEREOLABS.
//
// All rights reserved.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
// ZED include
#include "ClientPublisher.hpp"
#include "GLViewer.hpp"
// Standard includes
#include <iostream>
#include <map>
#include <memory>
#include <vector>
int main(int argc, char **argv) {
#ifdef _SL_JETSON_
const bool isJetson = true;
#else
const bool isJetson = false;
#endif
    if (argc != 2) {
        // the configuration file is generated with the ZED360 calibration tool
        std::cout << "Usage: pass the path to a ZED360 configuration file as "
                     "the only argument." << std::endl;
        return EXIT_FAILURE;
    }
    // Define the coordinate system and unit used in this sample
constexpr sl::COORDINATE_SYSTEM COORDINATE_SYSTEM =
sl::COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP;
constexpr sl::UNIT UNIT = sl::UNIT::METER;
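    // every camera and the fusion module must share this coordinate system
    // and unit, otherwise the per-camera world poses read from the
    // configuration file would not compose into a consistent world frame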
    // Read the JSON file containing the configuration of your multi-camera setup.
auto configurations =
sl::readFusionConfigurationFile(argv[1], COORDINATE_SYSTEM, UNIT);
if (configurations.empty()) {
std::cout << "Empty configuration File." << std::endl;
return EXIT_FAILURE;
}
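    // The ZED360 file is a JSON document that maps each camera serial number
    // to its input (USB, local stream or SVO) and to its calibrated world
    // pose; readFusionConfigurationFile parses it into sl::FusionConfiguration
    // entries expressed in the requested coordinate system and unit.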
Trigger trigger;
// Open camera streams from hard-coded configuration
std::vector<std::unique_ptr<ClientPublisher>> clients;
int id_ = 0;
int gpu_id = 0;
int nb_gpu = 0;
if (!isJetson)
cudaGetDeviceCount(&nb_gpu);
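    // Clients are spread round-robin over the detected GPUs (client i runs on
    // GPU i % nb_gpu); on Jetson the device count query is skipped and every
    // client falls back to GPU 0.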
// Use hard-coded camera stream configurations
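    // NOTE: CAMERA_STREAM_CONFIGS is not defined in this file; it is assumed
    // to be provided by one of the headers above as a list of entries exposing
    // the .serial_number, .ip_address and .port fields used below.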
for (const auto& cam_config : CAMERA_STREAM_CONFIGS) {
std::cout << "Try to open stream for ZED " << cam_config.serial_number
<< " at " << cam_config.ip_address << ":" << cam_config.port << ".."
<< std::flush;
gpu_id = (nb_gpu == 0) ? 0 : id_ % nb_gpu;
auto client = std::make_unique<ClientPublisher>();
auto state = client->open(cam_config.ip_address, cam_config.port, &trigger, gpu_id);
if (!state) {
std::cerr << "Could not open stream for ZED: " << cam_config.serial_number
<< ". Skipping..." << std::endl;
continue;
}
std::cout << ". ready ! (Serial: " << client->getSerialNumber() << ")" << std::endl;
clients.push_back(std::move(client));
id_++;
}
// start camera threads
for (auto &it : clients) {
it->start();
}
    // Now that the ZED cameras are running, we need to initialize the fusion
    // module
sl::InitFusionParameters init_params;
init_params.coordinate_units = UNIT;
init_params.coordinate_system = COORDINATE_SYSTEM;
sl::Resolution low_res(512, 360);
init_params.maximum_working_resolution = low_res;
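    // capping the working resolution reduces the bandwidth and compute cost of
    // the per-camera retrievals; 512x360 matches the low_res size requested
    // from retrieveImage/retrieveMeasure below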
    // create and initialize it, checking the returned error code
    sl::Fusion fusion;
    if (fusion.init(init_params) != sl::FUSION_ERROR_CODE::SUCCESS) {
        std::cout << "Unable to initialize the fusion module." << std::endl;
        return EXIT_FAILURE;
    }
    // subscribe to all the cameras of the setup to internally gather their data
std::vector<sl::CameraIdentifier> cameras;
for (auto &it : configurations) {
sl::CameraIdentifier uuid(it.serial_number);
// to subscribe to a camera you must give its serial number, the way to
// communicate with it (shared memory or local network), and its world pose
// in the setup.
auto state = fusion.subscribe(uuid, it.communication_parameters, it.pose,
it.override_gravity);
        if (state != sl::FUSION_ERROR_CODE::SUCCESS)
            std::cout << "Unable to subscribe to " << uuid.sn << ": " << state
                      << std::endl;
else
cameras.push_back(uuid);
}
// check that at least one camera is connected
if (cameras.empty()) {
std::cout << "no connections " << std::endl;
return EXIT_FAILURE;
}
    // as this sample shows how to fuse body detections from the multi-camera
    // setup, we enable the Body Tracking module with its options
sl::BodyTrackingFusionParameters body_fusion_init_params;
body_fusion_init_params.enable_tracking = true;
    body_fusion_init_params.enable_body_fitting =
        !isJetson; // fitted skeletons look more natural but require more
                   // computation
fusion.enableBodyTracking(body_fusion_init_params);
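    // enableBodyTracking() also returns a sl::FUSION_ERROR_CODE that could be
    // checked the same way as init() above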
// define fusion behavior
sl::BodyTrackingFusionRuntimeParameters body_tracking_runtime_parameters;
    // make sure the detected skeletons are complete enough before fusing them
    body_tracking_runtime_parameters.skeleton_minimum_allowed_keypoints = 7;
    // we can also require that a skeleton is seen by several cameras, in this
    // case by at least half of them
    body_tracking_runtime_parameters.skeleton_minimum_allowed_camera =
        cameras.size() / 2;
    // smoothing factor between 0 and 1: higher values give steadier skeletons
    // at the cost of some reactivity
    body_tracking_runtime_parameters.skeleton_smoothing = 0.1f;
// creation of a 3D viewer
GLViewer viewer;
viewer.init(argc, argv);
std::cout << "Viewer Shortcuts\n"
<< "\t- 'q': quit the application\n"
<< "\t- 'r': switch on/off for raw skeleton display\n"
<< "\t- 'p': switch on/off for live point cloud display\n"
<< "\t- 'c': switch on/off point cloud display with raw color\n"
<< std::endl;
// fusion outputs
sl::Bodies fused_bodies;
std::map<sl::CameraIdentifier, sl::Bodies> camera_raw_data;
sl::FusionMetrics metrics;
std::map<sl::CameraIdentifier, sl::Mat> views;
std::map<sl::CameraIdentifier, sl::Mat> pointClouds;
// run the fusion as long as the viewer is available.
while (viewer.isAvailable()) {
trigger.notifyZED();
        // run the fusion process (which gathers data from all cameras,
        // synchronizes it and processes it)
if (fusion.process() == sl::FUSION_ERROR_CODE::SUCCESS) {
            // Retrieve the fused bodies
fusion.retrieveBodies(fused_bodies, body_tracking_runtime_parameters);
            // for debugging, you can also retrieve the raw data sent by each camera
for (auto &id : cameras) {
fusion.retrieveBodies(camera_raw_data[id],
body_tracking_runtime_parameters, id);
auto state_view = fusion.retrieveImage(views[id], id, low_res);
auto state_pc = fusion.retrieveMeasure(pointClouds[id], id,
sl::MEASURE::XYZBGRA, low_res);
if (state_view == sl::FUSION_ERROR_CODE::SUCCESS &&
state_pc == sl::FUSION_ERROR_CODE::SUCCESS)
viewer.updateCamera(id.sn, views[id], pointClouds[id]);
sl::Pose pose;
if (fusion.getPosition(pose, sl::REFERENCE_FRAME::WORLD, id,
sl::POSITION_TYPE::RAW) ==
sl::POSITIONAL_TRACKING_STATE::OK)
viewer.setCameraPose(id.sn, pose.pose_data);
}
// get metrics about the fusion process for monitoring purposes
fusion.getProcessMetrics(metrics);
}
// update the 3D view
viewer.updateBodies(fused_bodies, camera_raw_data, metrics);
}
trigger.running = false;
trigger.notifyZED();
for (auto &it : clients)
it->stop();
fusion.close();
return EXIT_SUCCESS;
}
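
The Trigger type used throughout main.cpp comes from ClientPublisher.hpp, which is part of this commit but not shown in this section. Based only on the calls visible above (notifyZED() to release the camera grab threads, running to stop them), a minimal sketch of such a frame-synchronization barrier could look like the following; every member except notifyZED and running is an assumption:

#include <chrono>
#include <condition_variable>
#include <map>
#include <mutex>
#include <thread>

struct Trigger {
    // Wake every camera thread waiting on cv, then (while running) poll until
    // each registered camera has flagged its grab as completed, and finally
    // reset the flags for the next frame.
    void notifyZED() {
        cv.notify_all();
        if (running) {
            bool all_done = false;
            while (!all_done) {
                all_done = true;
                for (auto &it : states)
                    if (!it.second) all_done = false;
                std::this_thread::sleep_for(std::chrono::milliseconds(1));
            }
            for (auto &it : states)
                it.second = false;
        }
    }

    std::condition_variable cv;  // camera threads block on this between frames
    std::mutex mtx;              // lock the camera threads use to wait on cv
    bool running = true;         // cleared by main() before stopping the clients
    std::map<int, bool> states;  // per-camera "grab completed" flags
};

Each camera thread would then loop: wait on cv, call grab(), set its entry in states, and exit once running turns false.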