When streaming, the camera location information contains NaN values

Code
-------------------------------------------------------code-------------------------------------------------------------
// Standard includes
#include
#include
#include
#include
#include
#include
// Flag to disable the GUI to increase detection performances
// On low-end hardware such as Jetson Nano, the GUI significantly slows
// down the detection and increase the memory consumption
#define ENABLE_GUI 1

// ZED includes
#include <sl/Camera.hpp>
#include <cuda.h>

// Sample includes
#if ENABLE_GUI
#include <opencv2/imgcodecs.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/stitching.hpp>
#include <opencv2/core/cuda.hpp>
#include <opencv2/imgproc.hpp>
#endif

// Using std and sl namespaces
using namespace std;
using namespace sl;
bool is_playback = false;
// Prints a prefixed message with an optional SDK error code and suffix.
// FIX: the default argument used Unicode quotes (compile error) -> plain "".
void print(string msg_prefix, ERROR_CODE err_code = ERROR_CODE::SUCCESS, string msg_suffix = "");
void parseArgs(int argc, char **argv, InitParameters& param);
bool checkIsJetson();
// FIX: the template parameter list was lost in the paste ("template string tostr").
template <class T> string tostr(const T& t);
string type2str(int type);
void MatType( Mat inputMat );
bool beforeImageFlag = false ;

// Splits `s` on every occurrence of `seperator` and returns the pieces in
// order. Empty pieces are kept, so a trailing separator yields a trailing
// empty string and an empty input yields a single empty string.
std::vector<std::string> split(const std::string& s, char seperator) {
    std::vector<std::string> pieces;
    std::string::size_type start = 0;

    for (std::string::size_type hit = s.find(seperator, start);
         hit != std::string::npos;
         hit = s.find(seperator, start)) {
        pieces.push_back(s.substr(start, hit - start));
        start = hit + 1;
    }

    // Whatever follows the last separator (possibly empty) is the final piece.
    pieces.push_back(s.substr(start));
    return pieces;
}
//---------------------------streaming function-------------------------------------------//
//---------------------------streaming function-------------------------------------------//
// Configures `init_p` to open a network stream. `argument` is "IP" or
// "IP:port"; when a port part is present it is parsed with atoi and
// forwarded to setFromStream().
void setStreamParameter(InitParameters& init_p, string& argument) {
    // FIX: the separator was a Unicode char literal (compile error) -> ':'
    vector<string> configStream = split(argument, ':');
    String ip(configStream.at(0).c_str());
    if (configStream.size() == 2) {
        init_p.input.setFromStream(ip, atoi(configStream.at(1).c_str()));
    } else {
        init_p.input.setFromStream(ip);
    }
}

//-----------------------------main start------------------------------------------------//
int main(int argc, char **argv) {
// Create ZED objects
Camera zed;
//-----------------------------Zed camera setting--------------------------------------------//
InitParameters init_parameters;
init_parameters.camera_resolution = RESOLUTION::HD720;
// init_parameters.coordinate_units = UNIT::METER;
init_parameters.camera_fps = 30 ;
if (checkIsJetson())
// On Jetson (Nano, TX1/2) the object detection combined with an heavy depth mode could reduce the frame rate too much
init_parameters.depth_mode = DEPTH_MODE::PERFORMANCE;
else
init_parameters.depth_mode = DEPTH_MODE::ULTRA;

init_parameters.depth_maximum_distance = 50.0f * 1000.0f;
init_parameters.coordinate_system = COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP; // OpenGL's coordinate system is right_handed
init_parameters.sdk_verbose = true;
init_parameters.sensors_required = true;
parseArgs(argc, argv, init_parameters);

//----------------------------streaming parameter receive-------------------------------------//
string stream_params;
if (argc > 1) {
stream_params = string(argv[1]);
}else {
cout << “Usage : ./ImgReceiver IP:[port]\n”;
cin >> stream_params;
}
setStreamParameter(init_parameters, stream_params);

// Open the camera
ERROR_CODE zed_open_state = zed.open(init_parameters);
if (zed_open_state != ERROR_CODE::SUCCESS) {
    print("Camera Open", zed_open_state, "Exit program.");
return EXIT_FAILURE;
}

auto camera_infos = zed.getCameraInformation();

PositionalTrackingParameters tracking_parameters;
tracking_parameters.enable_area_memory = false;
ERROR_CODE zed_tracking_state = zed.enablePositionalTracking(tracking_parameters);
if (zed_tracking_state !=ERROR_CODE::SUCCESS){
        cout << "ERROR" << zed_tracking_state << ", exit program.\n";
        return EXIT_FAILURE;
}
bool quit = false;

#if ENABLE_GUI
auto res = camera_infos.camera_configuration.resolution;
Resolution display_resolution(min((int)res.width, 1280) , min((int)res.height, 720));
Mat image_left(display_resolution, MAT_TYPE::U8_C4);

char key = ' ';
auto camera_parameters = zed.getCameraInformation(display_resolution).camera_configuration.calibration_parameters.left_cam;

#endif

sl::RuntimeParameters runtime_parameters;
runtime_parameters.confidence_threshold = 50; //50;

Pose cam_pose;
cam_pose.pose_data.setIdentity();

cv::VideoWriter writer;   
cv::Mat outputMat;
cv::Mat outputRotate ;

sl::Resolution resolution = zed.getCameraInformation().camera_configuration.resolution ;

UnionMat image( resolution, MAT_TYPE::U8_C4, MEM::GPU);

cv::cuda::GpuMat result ;
cv::cuda::GpuMat result_image ;

//----------------------------------------------gstreamer start-----------------------------------------------//
writer.open("appsrc ! videoconvert ! "
"omxh264enc control-rate=2 bitrate=4000000 ! video/x-h264, stream-format=byte-stream ! "
“queue leaky=2 ! tcpserversink port=5000 host=0.0.0.0 recover-policy=keyframe”, 0, (double)30, cv::Size(1280,720), true);

// sensors data
SensorsData sensors_data ;
SensorsData::BarometerData barometer_data ;
SensorsData::IMUData imu_data ;

POSITIONAL_TRACKING_STATE tracking_state = POSITIONAL_TRACKING_STATE::OFF;
double fps = zed.getCurrentFPS() ;

std::chrono::system_clock::time_point StartTime_Period = std::chrono::system_clock::now() ; // period time  start

Timestamp last_image_timestamp ;
sl::float3 translation ;
sl::float3 translation2 ;
sl::float3 rotation ;
sl::float3 rotation2 ;

while (!quit ) { 

    if (zed.grab(runtime_parameters) !=  ERROR_CODE::SUCCESS) {
        zed.reboot(0);
     }

//---------------------camera pose, sensor data -------------------------------------------------//
tracking_state = zed.getPosition( cam_pose, REFERENCE_FRAME::WORLD);

    zed.getSensorsData( sensors_data, TIME_REFERENCE::IMAGE);  //sensordata

    barometer_data = sensors_data.barometer ;  //sensor data

    float pressure = barometer_data.pressure ; //pressure
    
    imu_data = sensors_data.imu; 
    float relative_altitude = barometer_data.relative_altitude ; //altitude 
    sl::float3 linear_acceleration = imu_data.linear_acceleration;
    sl::float3 angular_velocity = imu_data.angular_velocity ;
    std::chrono::system_clock::time_point StartTime_latency = std::chrono::system_clock::now() ; // latency start 

    auto zed_translation = cam_pose.pose_data.getTranslation();
    {

#if ENABLE_GUI
zed.retrieveImage(image_left, VIEW::LEFT, MEM::CPU , display_resolution);
zed.retrieveImage(image.getZED(), VIEW::LEFT, MEM::GPU);
#else
#endif
}
if (is_playback && zed.getSVOPosition() == zed.getSVONumberOfFrames()) {
quit = true;
}

#if ENABLE_GUI
if (zed.getSensorsData(sensors_data, TIME_REFERENCE::IMAGE) == sl::ERROR_CODE::SUCCESS){

        translation.x = sensors_data.imu.pose.getTranslation().x;
        translation.y = sensors_data.imu.pose.getTranslation().y; 
        translation.z = sensors_data.imu.pose.getTranslation().z;

        rotation.x =  sensors_data.imu.pose.getEulerAngles().x;
        rotation.y =  sensors_data.imu.pose.getEulerAngles().y;
        rotation.z =  sensors_data.imu.pose.getEulerAngles().z;

        if((isnan( translation.x)!= 0 ) || isinf(translation.x) ){ translation.x = 0.f ;  }
        if((isnan( translation.y)!= 0 ) || isinf(translation.y) ){ translation.y = 0.f ;  }
        if((isnan( translation.z)!= 0 ) || isinf(translation.y) ){ translation.z = 0.f ;  }

        if(( isnan( rotation.x ) !=0)){ rotation.x = 0.00f ;  }
        if(( isnan( rotation.y ) !=0)){ rotation.y = 0.00f ;  }
        if(( isnan( rotation.z ) !=0)){ rotation.z = 0.00f ;  }
     }

    if(tracking_state == POSITIONAL_TRACKING_STATE::OK){
    translation2.x = cam_pose.pose_data.getTranslation().x;
    translation2.y = cam_pose.pose_data.getTranslation().y;
    translation2.z = cam_pose.pose_data.getTranslation().z;
    }
   cout << "tracking_state : " << tracking_state << "translation2 : " << translation2.x << translation2.y << translation2.z << endl;
	{

//----------------------------------------------(X,Y,Z)--------------------------------------------------------//
cv::putText( outputMat, “X:”+tostr( translation.x), cv::Point(200,100), 1, 1, cv::Scalar(255,255,255));
cv::putText( outputMat, “Y:”+tostr( translation.y), cv::Point(340,100), 1, 1, cv::Scalar(255,255,255));
cv::putText( outputMat, “Z:”+tostr( translation.z), cv::Point(480,100), 1, 1, cv::Scalar(255,255,255));

    std::chrono::system_clock::time_point EndTime_Period = std::chrono::system_clock::now();
    std::chrono::seconds mill_Period = std::chrono::duration_cast<std::chrono::seconds>(EndTime_Period - StartTime_Period);

    cv::putText( outputMat, tostr(mill_Period.count()/60), cv::Point(1155,600), 2, 0.5, cv::Scalar(255,255,255));
    cv::putText( outputMat, "Min", cv::Point(1150,620), 2, 0.5, cv::Scalar(255,255,255));
    cv::circle(  outputMat , cv::Point(1160,610),50, cv::Scalar(255,255,255), 2, 8, 0);

    std::chrono::system_clock::time_point EndTime_latency = std::chrono::system_clock::now();
    std::chrono::milliseconds mill = std::chrono::duration_cast<std::chrono::milliseconds>(EndTime_latency - StartTime_latency);
    cv::putText( outputMat, "Latency:"+tostr(mill.count())+"ms", cv::Point(650,100), 1, 1.5, cv::Scalar(0, 0, 255));
  
    writer.write(outputMat);	 
    cv::imshow("ImgReceiver", outputMat);
    
    if (cv::waitKey(1)=='q'){
        break ;
    }
 }

#endif
}
#if ENABLE_GUI
image_left.free();
#endif
writer.release();
zed.disableStreaming();
zed.disablePositionalTracking();
zed.close();
return EXIT_SUCCESS;
}

-------------------------------------------------------code-------------------------------------------------------------

After "tracking_state = zed.getPosition(cam_pose, REFERENCE_FRAME::WORLD)", I added:

if(tracking_state == POSITIONAL_TRACKING_STATE::OK){
        translation2.x = cam_pose.pose_data.getTranslation().x;
        translation2.y = cam_pose.pose_data.getTranslation().y;
        translation2.z = cam_pose.pose_data.getTranslation().z;
    }

cout << "tracking_state : " << tracking_state << "translation2 : " << translation2.x << translation2.y << translation2.z << endl;

I get the translation2 value from the pose.
When I save and print it, NaN values are output.

This happens only when streaming: cam_pose.pose_data.getTranslation() returns NaN values. Is there any restriction on location information when streaming?

image

We need to find the distance the camera has moved. Help me…

Hi @hail
can you please provide more information about the task that you are performing.
Your post is not really clear and the code formatting does not help in understanding the problem.

1 Like

Let me explain with a sample
(the positional tracking sample).


///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2021, STEREOLABS.
//
// All rights reserved.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////

/*************************************************************************
** This sample demonstrates how to use the ZED for positional tracking **
** and display camera motion in an OpenGL window. **
**************************************************************************/

// ZED includes
#include <sl/Camera.hpp>

// Sample includes
#include “GLViewer.hpp”

// Using std namespace
using namespace std;
using namespace sl;

#define IMU_ONLY 0
const int MAX_CHAR = 128;

// Formats an sl::float3 as "x; y; z" (two decimals each) into ptr_txt,
// which must point to at least MAX_CHAR bytes.
// FIX: the snprintf format string used Unicode quotes (compile error).
inline void setTxt(sl::float3 value, char* ptr_txt) {
    snprintf(ptr_txt, MAX_CHAR, "%3.2f; %3.2f; %3.2f", value.x, value.y, value.z);
}
//---------------------------streaming function-------------------------------------------//
//---------------------------streaming function-------------------------------------------//
// Breaks `s` into the substrings delimited by `seperator`, preserving empty
// fields (so "a::b" gives three pieces and a trailing separator gives a
// trailing empty piece).
std::vector<std::string> split(const std::string& s, char seperator) {
    std::vector<std::string> fields;
    std::string::size_type from = 0;
    std::string::size_type at;

    while ((at = s.find(seperator, from)) != std::string::npos) {
        // Construct the field in place from [from, at).
        fields.emplace_back(s, from, at - from);
        from = at + 1;
    }

    // The remainder after the last separator is the final field.
    fields.emplace_back(s, from, std::string::npos);
    return fields;
}
// Configures `init_p` to open a network stream. `argument` is "IP" or
// "IP:port"; a present port part is parsed with atoi and passed on.
void setStreamParameter(InitParameters& init_p, string& argument) {
    // FIX: the separator was a Unicode char literal (compile error) -> ':'
    vector<string> configStream = split(argument, ':');
    String ip(configStream.at(0).c_str());
    if (configStream.size() == 2) {
        init_p.input.setFromStream(ip, atoi(configStream.at(1).c_str()));
    } else {
        init_p.input.setFromStream(ip);
    }
}
//----------------------------------------------------------------------------------//

void parseArgs(int argc, char **argv, sl::InitParameters& param);

// Positional-tracking sample extended with stream input: reads "IP[:port]"
// from argv (or stdin), opens the stream, and displays camera motion in the
// GLViewer while printing rotation/translation to stdout.
int main(int argc, char **argv) {

    Camera zed;
    // Set configuration parameters for the ZED
    InitParameters init_parameters;
    init_parameters.coordinate_units = UNIT::METER;
    init_parameters.coordinate_system = COORDINATE_SYSTEM::RIGHT_HANDED_Y_UP;
    init_parameters.sdk_verbose = true;
    parseArgs(argc, argv, init_parameters);

    //-----------------------------------------------streaming---------------------------//
    string stream_params;
    if (argc > 1) {
        stream_params = string(argv[1]);
    } else {
        // FIX: literal used Unicode quotes (compile error)
        cout << "Usage : ./ImgReceiver IP:[port]\n";
        cin >> stream_params;
    }
    setStreamParameter(init_parameters, stream_params);
    //------------------------------------------------------------------------------------

    // Open the camera
    auto returned_state = zed.open(init_parameters);
    if (returned_state != ERROR_CODE::SUCCESS) {
        print("Camera Open", returned_state, "Exit program.");
        return EXIT_FAILURE;
    }

    auto camera_model = zed.getCameraInformation().camera_model;
    GLViewer viewer;
    // Initialize OpenGL viewer
    viewer.init(argc, argv, camera_model);

    // Text buffers shown in the GUI.
    // FIX: zero-initialize — viewer.updateData() reads both buffers before the
    // first OK tracking state ever writes them (uninitialized read otherwise).
    char text_rotation[MAX_CHAR] = {0};
    char text_translation[MAX_CHAR] = {0};

    // Set parameters for Positional Tracking
    PositionalTrackingParameters positional_tracking_param;
    // positional_tracking_param.enable_area_memory = true;

    // enable Positional Tracking
    returned_state = zed.enablePositionalTracking(positional_tracking_param);
    if (returned_state != ERROR_CODE::SUCCESS) {
        print("Enabling positionnal tracking failed: ", returned_state);
        zed.close();
        return EXIT_FAILURE;
    }

    sl::float3 translation;
    Pose camera_path;
    POSITIONAL_TRACKING_STATE tracking_state;

#if IMU_ONLY
    SensorsData sensors_data;
#endif

    while (viewer.isAvailable()) {
        if (zed.grab() == ERROR_CODE::SUCCESS) {
            // Get the position of the camera in a fixed reference frame (the World Frame)
            tracking_state = zed.getPosition(camera_path, REFERENCE_FRAME::WORLD);

#if IMU_ONLY
            if (zed.getSensorsData(sensors_data, TIME_REFERENCE::IMAGE) == sl::ERROR_CODE::SUCCESS) {
                // Only rotation is computed for the IMU
                setTxt(sensors_data.imu.pose.getEulerAngles(), text_rotation);
                viewer.updateData(sensors_data.imu.pose, string(text_translation), string(text_rotation), sl::POSITIONAL_TRACKING_STATE::OK);
                cout << "sensor : " << text_translation << endl;
            }
#else
            if (tracking_state == POSITIONAL_TRACKING_STATE::OK) {
                // Get rotation and translation and display them
                setTxt(camera_path.getEulerAngles(), text_rotation);
                setTxt(camera_path.getTranslation(), text_translation);
                translation.x = camera_path.getTranslation().x;
                translation.y = camera_path.getTranslation().y;
                translation.z = camera_path.getTranslation().z;
                cout << "text_rotation : " << text_rotation << endl;
                cout << "text_translation : " << text_translation << endl;
                cout << "translation x : " << translation.x << "translation y : " << translation.y << "translation.z : " << translation.z << endl;
            }

            // Update rotation, translation and tracking state values in the OpenGL window
            viewer.updateData(camera_path.pose_data, string(text_translation), string(text_rotation), tracking_state);
#endif

        } else
            sleep_ms(1);
    }

    zed.disablePositionalTracking();

    //zed.disableRecording();
    zed.close();
    return EXIT_SUCCESS;

}

void parseArgs(int argc, char **argv, sl::InitParameters& param) {
if (argc > 1 && string(argv[1]).find(".svo") != string::npos) {
// SVO input mode
param.input.setFromSVOFile(argv[1]);
cout << “[Sample] Using SVO File input: " << argv[1] << endl;
} else if (argc > 1 && string(argv[1]).find(”.svo") == string::npos) {
string arg = string(argv[1]);
unsigned int a, b, c, d, port;
if (sscanf(arg.c_str(), “%u.%u.%u.%u:%d”, &a, &b, &c, &d, &port) == 5) {
// Stream input mode - IP + port
string ip_adress = to_string(a) + “.” + to_string(b) + “.” + to_string(c) + “.” + to_string(d);
param.input.setFromStream(sl::String(ip_adress.c_str()), port);
cout << "[Sample] Using Stream input, IP : " << ip_adress << ", port : " << port << endl;
} else if (sscanf(arg.c_str(), “%u.%u.%u.%u”, &a, &b, &c, &d) == 4) {
// Stream input mode - IP only
param.input.setFromStream(sl::String(argv[1]));
cout << "[Sample] Using Stream input, IP : " << argv[1] << endl;
} else if (arg.find(“HD2K”) != string::npos) {
param.camera_resolution = sl::RESOLUTION::HD2K;
cout << “[Sample] Using Camera in resolution HD2K” << endl;
} else if (arg.find(“HD1080”) != string::npos) {
param.camera_resolution = sl::RESOLUTION::HD1080;
cout << “[Sample] Using Camera in resolution HD1080” << endl;
} else if (arg.find(“HD720”) != string::npos) {
param.camera_resolution = sl::RESOLUTION::HD720;
cout << “[Sample] Using Camera in resolution HD720” << endl;
} else if (arg.find(“VGA”) != string::npos) {
param.camera_resolution = sl::RESOLUTION::VGA;
cout << “[Sample] Using Camera in resolution VGA” << endl;
}
} else {
// Default
}
}


I'm streaming from a ZED2 camera connected to a Jetson Nano board to a TX2 board.
After adding the streaming function to the positional tracking sample and starting the stream,
tracking works for less than 5 seconds; then the state becomes "SEARCHING" and never recovers.

Are there any restrictions on position information when streaming?

Also, regarding
translation.x = sensors_data.imu.pose.getTranslation().x

there doesn't seem to be any data in "imu.pose.getTranslation()" — what does this field actually contain?

I'm sorry, my English is not very good.