#include "ClientPublisher.hpp"

ClientPublisher::ClientPublisher() { }

ClientPublisher::~ClientPublisher()
{
    // Join the grab thread before releasing the camera: closing the camera
    // while work() may still call zed.grab() would be a use-after-close race.
    // NOTE(review): like stop(), this blocks until the trigger owner wakes the
    // worker with running == false — confirm callers always do that first.
    if (runner.joinable())
        runner.join();
    zed.close();
}

/// Open the camera and prepare it for the fusion pipeline.
/// @param input       camera input (serial number / SVO / stream) to open
/// @param ref         shared trigger used to synchronize grabs across cameras
/// @param sdk_gpu_id  GPU on which the SDK allocates its resources
/// @return true when the camera, positional tracking, and body tracking are
///         all enabled; false otherwise (the camera is closed on failure).
bool ClientPublisher::open(sl::InputType input, Trigger* ref, int sdk_gpu_id)
{
    p_trigger = ref;

    sl::InitParameters init_parameters;
    init_parameters.depth_mode = sl::DEPTH_MODE::NEURAL_PLUS;
    init_parameters.input = input;
    init_parameters.coordinate_units = sl::UNIT::METER;
    // Fusion requires every camera to share the same coordinate frame.
    init_parameters.coordinate_system = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP;
    init_parameters.camera_resolution = sl::RESOLUTION::HD1080;
    init_parameters.camera_fps = 30;
    init_parameters.depth_stabilization = 30;
    init_parameters.sdk_gpu_id = sdk_gpu_id;
    init_parameters.depth_minimum_distance = 1.5f;
    init_parameters.depth_maximum_distance = 10.0f;

    auto state = zed.open(init_parameters);
    if (state != sl::ERROR_CODE::SUCCESS) {
        std::cout << "Error: " << state << std::endl;
        return false;
    }

    serial = zed.getCameraInformation().serial_number;
    // Register this camera with the shared trigger; work() flips the flag to
    // true once a grab cycle is complete.
    p_trigger->states[serial] = false;

    // In most body-tracking setups the cameras are static.
    sl::PositionalTrackingParameters positional_tracking_parameters;
    positional_tracking_parameters.set_as_static = true;
    state = zed.enablePositionalTracking(positional_tracking_parameters);
    if (state != sl::ERROR_CODE::SUCCESS) {
        std::cout << "Error: " << state << std::endl;
        zed.close();  // don't leak a half-initialized camera
        return false;
    }

    // Body tracking parameters: the fusion module performs tracking and
    // fitting itself, so both are disabled here unless the application
    // needs them locally.
    sl::BodyTrackingParameters body_tracking_parameters;
    body_tracking_parameters.detection_model = sl::BODY_TRACKING_MODEL::HUMAN_BODY_ACCURATE;
    body_tracking_parameters.body_format = sl::BODY_FORMAT::BODY_18;
    body_tracking_parameters.enable_body_fitting = false;
    body_tracking_parameters.enable_tracking = false;
    state = zed.enableBodyTracking(body_tracking_parameters);
    if (state != sl::ERROR_CODE::SUCCESS) {
        std::cout << "Error: " << state << std::endl;
        zed.close();  // don't leak a half-initialized camera
        return false;
    }

    return true;
}

/// Start publishing camera data and launch the background grab loop.
/// No-op when the camera was not successfully opened.
void ClientPublisher::start()
{
    if (zed.isOpened()) {
        // The camera streams its data so the fusion module can subscribe to
        // it and gather the detected bodies plus the metadata it needs.
        zed.startPublishing();
        // Process camera grabs in the background.
        runner = std::thread(&ClientPublisher::work, this);
    }
}

/// Stop the grab loop and release the camera.
/// Precondition: the trigger owner has set running = false and notified the
/// condition variable, otherwise join() blocks forever in work()'s wait.
void ClientPublisher::stop()
{
    if (runner.joinable())
        runner.join();
    zed.close();
}

/// Background loop: wait on the shared trigger, grab one frame, then report
/// completion through p_trigger->states[serial].
void ClientPublisher::work()
{
    sl::BodyTrackingRuntimeParameters body_runtime_parameters;
    body_runtime_parameters.detection_confidence_threshold = 40;
    zed.setBodyTrackingRuntimeParameters(body_runtime_parameters);

    sl::RuntimeParameters rt;
    rt.confidence_threshold = 30;

    while (p_trigger->running) {
        std::unique_lock lk(mtx);
        // Plain wait: each notification triggers (at most) one grab. A
        // spurious wakeup only causes an extra loop iteration, which is
        // reported as "done" below just like a real one.
        p_trigger->cv.wait(lk);
        if (p_trigger->running) {
            if (zed.grab(rt) == sl::ERROR_CODE::SUCCESS) {
                // Nothing to retrieve locally: the published stream carries
                // the body data consumed by the fusion module.
            }
        }
        // Signal that this camera has handled the current trigger cycle.
        p_trigger->states[serial] = true;
    }
}