Code
-------------------------------------------------------code-------------------------------------------------------------
// Standard includes
// NOTE(review): the six header names below were stripped by the paste
// (bare "#include" lines); restored to the standard headers this file uses
// (iostream/cin/cout, string, vector, chrono timing, isnan/isinf, atoi).
#include <iostream>
#include <string>
#include <vector>
#include <chrono>
#include <cmath>
#include <cstdlib>
// Flag to disable the GUI to increase detection performances
// On low-end hardware such as Jetson Nano, the GUI significantly slows
// down the detection and increase the memory consumption
#define ENABLE_GUI 1
// ZED includes
#include <sl/Camera.hpp>
#include <cuda.h>
// Sample includes
#if ENABLE_GUI
#include <opencv2/imgcodecs.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/stitching.hpp>
#include <opencv2/core/cuda.hpp>
#include <opencv2/imgproc.hpp>
#endif
// Using std and sl namespaces
using namespace std;
using namespace sl;

// Set when the input is an SVO file so the main loop can stop at the last frame.
bool is_playback = false;

// Forward declarations — definitions live elsewhere in the project.
// Fixed: the default argument used curly quotes (“”), which does not compile.
void print(string msg_prefix, ERROR_CODE err_code = ERROR_CODE::SUCCESS, string msg_suffix = "");
void parseArgs(int argc, char **argv, InitParameters& param);
bool checkIsJetson();
// Fixed: declaration was missing the template parameter list (<typename T>).
template <typename T> string tostr(const T& t);
string type2str(int type);
void MatType( Mat inputMat );
bool beforeImageFlag = false ;
// Split `s` on every occurrence of `seperator`.
// Contract: N separators yield N+1 tokens, so an empty input produces one
// empty token and a trailing separator produces a trailing empty token.
std::vector<std::string> split(const std::string& s, char seperator) {
    std::vector<std::string> tokens;
    std::string::size_type start = 0;
    for (std::string::size_type hit = s.find(seperator);
         hit != std::string::npos;
         hit = s.find(seperator, start)) {
        tokens.push_back(s.substr(start, hit - start));
        start = hit + 1;
    }
    tokens.push_back(s.substr(start));  // remainder after the last separator
    return tokens;
}
//---------------------------streaming function-------------------------------------------//
// Parse an "IP[:port]" argument and configure `init_p` to read its input
// from that network stream. When no port is given, the SDK default is used.
// Fixed: the char literal used curly quotes (‘:’), which does not compile.
void setStreamParameter(InitParameters& init_p, string& argument) {
    vector<string> configStream = split(argument, ':');
    String ip(configStream.at(0).c_str());
    if (configStream.size() == 2) {
        // Explicit port supplied after the colon.
        init_p.input.setFromStream(ip, atoi(configStream.at(1).c_str()));
    } else
        init_p.input.setFromStream(ip);
}
//-----------------------------main start------------------------------------------------//
int main(int argc, char **argv) {
// Create ZED objects
Camera zed;
//-----------------------------Zed camera setting--------------------------------------------//
InitParameters init_parameters;
init_parameters.camera_resolution = RESOLUTION::HD720;
// init_parameters.coordinate_units = UNIT::METER;
init_parameters.camera_fps = 30 ;
if (checkIsJetson())
// On Jetson (Nano, TX1/2) the object detection combined with an heavy depth mode could reduce the frame rate too much
init_parameters.depth_mode = DEPTH_MODE::PERFORMANCE;
else
init_parameters.depth_mode = DEPTH_MODE::ULTRA;
init_parameters.depth_maximum_distance = 50.0f * 1000.0f;
init_parameters.coordinate_system = COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP; // OpenGL's coordinate system is right_handed
init_parameters.sdk_verbose = true;
init_parameters.sensors_required = true;
parseArgs(argc, argv, init_parameters);
//----------------------------streaming parameter receive-------------------------------------//
string stream_params;
if (argc > 1) {
stream_params = string(argv[1]);
}else {
cout << “Usage : ./ImgReceiver IP:[port]\n”;
cin >> stream_params;
}
setStreamParameter(init_parameters, stream_params);
// Open the camera
ERROR_CODE zed_open_state = zed.open(init_parameters);
if (zed_open_state != ERROR_CODE::SUCCESS) {
print("Camera Open", zed_open_state, "Exit program.");
return EXIT_FAILURE;
}
auto camera_infos = zed.getCameraInformation();
PositionalTrackingParameters tracking_parameters;
tracking_parameters.enable_area_memory = false;
ERROR_CODE zed_tracking_state = zed.enablePositionalTracking(tracking_parameters);
if (zed_tracking_state !=ERROR_CODE::SUCCESS){
cout << "ERROR" << zed_tracking_state << ", exit program.\n";
return EXIT_FAILURE;
}
bool quit = false;
#if ENABLE_GUI
auto res = camera_infos.camera_configuration.resolution;
Resolution display_resolution(min((int)res.width, 1280) , min((int)res.height, 720));
Mat image_left(display_resolution, MAT_TYPE::U8_C4);
char key = ' ';
auto camera_parameters = zed.getCameraInformation(display_resolution).camera_configuration.calibration_parameters.left_cam;
#endif
sl::RuntimeParameters runtime_parameters;
runtime_parameters.confidence_threshold = 50; //50;
Pose cam_pose;
cam_pose.pose_data.setIdentity();
cv::VideoWriter writer;
cv::Mat outputMat;
cv::Mat outputRotate ;
sl::Resolution resolution = zed.getCameraInformation().camera_configuration.resolution ;
UnionMat image( resolution, MAT_TYPE::U8_C4, MEM::GPU);
cv::cuda::GpuMat result ;
cv::cuda::GpuMat result_image ;
//----------------------------------------------gstreamer start-----------------------------------------------//
writer.open("appsrc ! videoconvert ! "
"omxh264enc control-rate=2 bitrate=4000000 ! video/x-h264, stream-format=byte-stream ! "
“queue leaky=2 ! tcpserversink port=5000 host=0.0.0.0 recover-policy=keyframe”, 0, (double)30, cv::Size(1280,720), true);
// sensors data
SensorsData sensors_data ;
SensorsData::BarometerData barometer_data ;
SensorsData::IMUData imu_data ;
POSITIONAL_TRACKING_STATE tracking_state = POSITIONAL_TRACKING_STATE::OFF;
double fps = zed.getCurrentFPS() ;
std::chrono::system_clock::time_point StartTime_Period = std::chrono::system_clock::now() ; // period time start
Timestamp last_image_timestamp ;
sl::float3 translation ;
sl::float3 translation2 ;
sl::float3 rotation ;
sl::float3 rotation2 ;
while (!quit ) {
if (zed.grab(runtime_parameters) != ERROR_CODE::SUCCESS) {
zed.reboot(0);
}
//---------------------camera pose, sensor data -------------------------------------------------//
tracking_state = zed.getPosition( cam_pose, REFERENCE_FRAME::WORLD);
zed.getSensorsData( sensors_data, TIME_REFERENCE::IMAGE); //sensordata
barometer_data = sensors_data.barometer ; //sensor data
float pressure = barometer_data.pressure ; //pressure
imu_data = sensors_data.imu;
float relative_altitude = barometer_data.relative_altitude ; //altitude
sl::float3 linear_acceleration = imu_data.linear_acceleration;
sl::float3 angular_velocity = imu_data.angular_velocity ;
std::chrono::system_clock::time_point StartTime_latency = std::chrono::system_clock::now() ; // latency start
auto zed_translation = cam_pose.pose_data.getTranslation();
{
#if ENABLE_GUI
zed.retrieveImage(image_left, VIEW::LEFT, MEM::CPU , display_resolution);
zed.retrieveImage(image.getZED(), VIEW::LEFT, MEM::GPU);
#else
#endif
}
if (is_playback && zed.getSVOPosition() == zed.getSVONumberOfFrames()) {
quit = true;
}
#if ENABLE_GUI
if (zed.getSensorsData(sensors_data, TIME_REFERENCE::IMAGE) == sl::ERROR_CODE::SUCCESS){
translation.x = sensors_data.imu.pose.getTranslation().x;
translation.y = sensors_data.imu.pose.getTranslation().y;
translation.z = sensors_data.imu.pose.getTranslation().z;
rotation.x = sensors_data.imu.pose.getEulerAngles().x;
rotation.y = sensors_data.imu.pose.getEulerAngles().y;
rotation.z = sensors_data.imu.pose.getEulerAngles().z;
if((isnan( translation.x)!= 0 ) || isinf(translation.x) ){ translation.x = 0.f ; }
if((isnan( translation.y)!= 0 ) || isinf(translation.y) ){ translation.y = 0.f ; }
if((isnan( translation.z)!= 0 ) || isinf(translation.y) ){ translation.z = 0.f ; }
if(( isnan( rotation.x ) !=0)){ rotation.x = 0.00f ; }
if(( isnan( rotation.y ) !=0)){ rotation.y = 0.00f ; }
if(( isnan( rotation.z ) !=0)){ rotation.z = 0.00f ; }
}
if(tracking_state == POSITIONAL_TRACKING_STATE::OK){
translation2.x = cam_pose.pose_data.getTranslation().x;
translation2.y = cam_pose.pose_data.getTranslation().y;
translation2.z = cam_pose.pose_data.getTranslation().z;
}
cout << "tracking_state : " << tracking_state << "translation2 : " << translation2.x << translation2.y << translation2.z << endl;
{
//----------------------------------------------(X,Y,Z)--------------------------------------------------------//
cv::putText( outputMat, “X:”+tostr( translation.x), cv::Point(200,100), 1, 1, cv::Scalar(255,255,255));
cv::putText( outputMat, “Y:”+tostr( translation.y), cv::Point(340,100), 1, 1, cv::Scalar(255,255,255));
cv::putText( outputMat, “Z:”+tostr( translation.z), cv::Point(480,100), 1, 1, cv::Scalar(255,255,255));
std::chrono::system_clock::time_point EndTime_Period = std::chrono::system_clock::now();
std::chrono::seconds mill_Period = std::chrono::duration_cast<std::chrono::seconds>(EndTime_Period - StartTime_Period);
cv::putText( outputMat, tostr(mill_Period.count()/60), cv::Point(1155,600), 2, 0.5, cv::Scalar(255,255,255));
cv::putText( outputMat, "Min", cv::Point(1150,620), 2, 0.5, cv::Scalar(255,255,255));
cv::circle( outputMat , cv::Point(1160,610),50, cv::Scalar(255,255,255), 2, 8, 0);
std::chrono::system_clock::time_point EndTime_latency = std::chrono::system_clock::now();
std::chrono::milliseconds mill = std::chrono::duration_cast<std::chrono::milliseconds>(EndTime_latency - StartTime_latency);
cv::putText( outputMat, "Latency:"+tostr(mill.count())+"ms", cv::Point(650,100), 1, 1.5, cv::Scalar(0, 0, 255));
writer.write(outputMat);
cv::imshow("ImgReceiver", outputMat);
if (cv::waitKey(1)=='q'){
break ;
}
}
#endif
}
#if ENABLE_GUI
image_left.free();
#endif
writer.release();
zed.disableStreaming();
zed.disablePositionalTracking();
zed.close();
return EXIT_SUCCESS;
}
-------------------------------------------------------code-------------------------------------------------------------
After `tracking_state = zed.getPosition(cam_pose, REFERENCE_FRAME::WORLD)`:
if(tracking_state == POSITIONAL_TRACKING_STATE::OK){
translation2.x = cam_pose.pose_data.getTranslation().x;
translation2.y = cam_pose.pose_data.getTranslation().y;
translation2.z = cam_pose.pose_data.getTranslation().z;
}
cout << "tracking_state : " << tracking_state << "translation2 : " << translation2.x << translation2.y << translation2.z << endl;
I read the translation2 value from the pose as shown above.
When I save and print it, the values come out as NaN.
Only when the input is a network stream does cam_pose.pose_data.getTranslation() return NaN — is there any restriction on positional tracking while streaming?
I need to measure the distance the camera has moved. Any help would be appreciated.