Example of how to get synchronized video and sensor data from the VideoCapture class and the SensorCapture class.
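At its core, the synchronization requires only a few calls. The snippet below is a condensed sketch (error handling, threading and display are omitted, and syncSketch is just a placeholder name for illustration); it uses only classes and methods that appear in the full listing that follows.

#include <cstdlib>

#include "videocapture.hpp"
#include "sensorcapture.hpp"

int syncSketch()
{
    // Open the video stream (same device id used in the full listing)
    sl_oc::video::VideoParams params;
    sl_oc::video::VideoCapture videoCap(params);
    if( !videoCap.initializeVideo(-1) )
        return EXIT_FAILURE;

    // Open the sensor stream of the same camera, identified by its serial number
    sl_oc::sensors::SensorCapture sensCap;
    if( !sensCap.initializeSensors(videoCap.getSerialNumber()) )
        return EXIT_FAILURE;

    // From now on video frame timestamps and sensor data timestamps refer to the
    // same clock and can be compared directly; IMU samples associated with a
    // video frame have their `sync` flag set.
    videoCap.enableSensorSync(&sensCap);

    return EXIT_SUCCESS;
}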
 
 
#include <iostream>
#include <sstream>
#include <iomanip>
#include <thread>
#include <mutex>
 
#include <opencv2/opencv.hpp>

#include "videocapture.hpp"
#include "sensorcapture.hpp"

// Sensor data is received at up to 400 Hz, so it is grabbed in a dedicated thread
void getSensorThreadFunc(sl_oc::sensors::SensorCapture* sensCap);

// Data shared between the main (video) thread and the sensor grabbing thread
std::mutex imuMutex;
std::string imuTsStr;
std::string imuAccelStr;
std::string imuGyroStr;

bool sensThreadStop = false;   // stop flag for the sensor grabbing thread
uint64_t mcu_sync_ts = 0;      // timestamp of the last IMU sample synchronized with a video frame

int main(int argc, char *argv[])
{
    
    (void)argc;
    (void)argv;

    // Set the video parameters (note: FPS_30 is not available for RESOLUTION::HD2K)
    sl_oc::video::VideoParams params;
    params.res = sl_oc::video::RESOLUTION::HD720;
    params.fps = sl_oc::video::FPS::FPS_30;

    // Create a Video Capture object and open the video stream
    sl_oc::video::VideoCapture videoCap(params);
    if( !videoCap.initializeVideo(-1) )
    {
        std::cerr << "Cannot open camera video capture" << std::endl;
        std::cerr << "Try to enable verbose to get more info" << std::endl;
 
        return EXIT_FAILURE;
    }
 
    
    int camSn = videoCap.getSerialNumber();
 
    std::cout << "Video Capture connected to camera sn: " << camSn << std::endl;
    
 
    
    if( !sensCap.initializeSensors(camSn) ) 
    {
        std::cerr << "Cannot open sensors capture" << std::endl;
        std::cerr << "Try to enable verbose to get more info" << std::endl;
 
        return EXIT_FAILURE;
    }
    std::cout << "Sensors Capture connected to camera sn: " << sensCap.getSerialNumber() << std::endl;
 
    
    
    
 
    
    videoCap.enableSensorSync(&sensCap);
    
 
    
    int w,h;
    videoCap.getFrameSize(w,h);
 
    cv::Size display_resolution(1024, 576);
 
    // Scale the display size according to the capture resolution
    switch(params.res)
    {
    default:
    case sl_oc::video::RESOLUTION::VGA:
        display_resolution.width = w;
        display_resolution.height = h;
        break;
    case sl_oc::video::RESOLUTION::HD720:
        display_resolution.width = w*0.6;
        display_resolution.height = h*0.6;
        break;
    case sl_oc::video::RESOLUTION::HD1080:
    case sl_oc::video::RESOLUTION::HD2K:
        display_resolution.width = w*0.4;
        display_resolution.height = h*0.4;
        break;
    }
 
    int h_data = 70;
    cv::Mat frameDisplay(display_resolution.height + h_data, display_resolution.width,CV_8UC3, cv::Scalar(0,0,0));
    cv::Mat frameData = frameDisplay(cv::Rect(0,0, display_resolution.width, h_data));
    cv::Mat frameBGRDisplay = frameDisplay(cv::Rect(0,h_data, display_resolution.width, display_resolution.height));
    cv::Mat frameBGR(h, w, CV_8UC3, cv::Scalar(0,0,0));
    
 
    uint64_t last_timestamp = 0;
 
    float frame_fps=0;
 
    
    while (1)
    {
        
        
 
        
        // Get the last available video frame
        const sl_oc::video::Frame frame = videoCap.getLastFrame();

        // Text overlay with the video timestamp information
        std::stringstream videoTs;

        // If a new valid frame has been received, update the FPS estimation and convert it for display
        if( frame.data!=nullptr && frame.timestamp!=last_timestamp )
        {
            frame_fps = 1e9/static_cast<float>(frame.timestamp-last_timestamp);
            last_timestamp = frame.timestamp;

            // Convert the YUV 4:2:2 frame buffer to BGR for visualization
            cv::Mat frameYUV = cv::Mat( frame.height, frame.width, CV_8UC2, frame.data );
            cv::cvtColor(frameYUV, frameBGR, cv::COLOR_YUV2BGR_YUYV);
        }
        
 
        
        videoTs << std::fixed << std::setprecision(9) << "Video timestamp: " << static_cast<double>(last_timestamp)/1e9<< " sec" ;
        if( last_timestamp!=0 )
            videoTs << std::fixed << std::setprecision(1)  << " [" << frame_fps << " Hz]";
        
 
        
        // Show the video frame with the superimposed data only if it is valid
        if( frame.data!=nullptr )
        {
            frameData.setTo(0);
 
            
            cv::putText( frameData, videoTs.str(), cv::Point(10,20),cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(241,240,236));
 
            
            // The following strings are written by the sensor thread, so they must be protected
            imuMutex.lock();
            cv::putText( frameData, imuTsStr, cv::Point(10, 35),cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(241,240,236));
 
            
            // Offset between the video frame timestamp and the latest synchronized sensor timestamp
            std::stringstream offsetStr;
            double offset = (static_cast<double>(frame.timestamp) - static_cast<double>(mcu_sync_ts))/1e9;
            offsetStr << std::fixed << std::setprecision(9) << std::showpos << "Timestamp offset: " << offset << " sec [video-sensors]";
            cv::putText( frameData, offsetStr.str().c_str(), cv::Point(10, 50),cv::FONT_HERSHEY_SIMPLEX, 0.35, cv::Scalar(241,240,236));
 
            
            // Average offset, computed after the first frames so the statistic is not biased by startup
            if( frame.frame_id > 200 )
            {
                static double sum=0;
                static int count=0;
 
                sum += offset;
                double avg_offset=sum/(++count);
 
                std::stringstream avgOffsetStr;
                avgOffsetStr << std::fixed << std::setprecision(9) << std::showpos << "Avg timestamp offset: " << avg_offset << " sec";
                cv::putText( frameData, avgOffsetStr.str().c_str(), cv::Point(10,62),cv::FONT_HERSHEY_SIMPLEX,0.35, cv::Scalar(241, 240,236));
            }
 
            
            cv::putText( frameData, "Inertial sensor data:", cv::Point(display_resolution.width/2,20),cv::FONT_HERSHEY_SIMPLEX, 0.6, cv::Scalar(241, 240,236));
            cv::putText( frameData, imuAccelStr, cv::Point(display_resolution.width/2+15,42),cv::FONT_HERSHEY_SIMPLEX, 0.6, cv::Scalar(241, 240,236));
            cv::putText( frameData, imuGyroStr, cv::Point(display_resolution.width/2+15, 62),cv::FONT_HERSHEY_SIMPLEX, 0.6, cv::Scalar(241, 240,236));
 
            
            imuMutex.unlock();

            // Resize the image for display and show it with the data overlay
            cv::resize(frameBGR, frameBGRDisplay, display_resolution);
            
            cv::imshow( "Stream RGB", frameDisplay);
        }
        
 
        
        int key = cv::waitKey(1);
 
        if( key != -1 )
        {
            
            if(key=='q' || key=='Q'|| key==27)
            {
                // Stop the sensor thread and wait for its termination before quitting
                sensThreadStop = true;
                sensThread.join();
                break;
            }
        }
        
    }
 
    return EXIT_SUCCESS;
}
 
// Sensor grabbing function running in the dedicated thread
void getSensorThreadFunc(sl_oc::sensors::SensorCapture* sensCap)
{
    
 
    
    // Reset the stop flag
    sensThreadStop = false;

    // Previous IMU timestamp, used to estimate the sensor data rate
    uint64_t last_imu_ts = 0;
 
    
    // Sensor grabbing loop
    while(!sensThreadStop)
    {
        
 
        
        // Get the last received IMU data (default timeout: 1500 usec)
        const sl_oc::sensors::data::Imu imuData = sensCap->getLastIMUData();

        // Process the data only if a new valid sample has been received
        if( imuData.valid == sl_oc::sensors::data::Imu::NEW_VAL )
        {
            
            std::stringstream timestamp;
            std::stringstream accel;
            std::stringstream gyro;
 
            timestamp << std::fixed << std::setprecision(9) << "IMU timestamp:   " << static_cast<double>(imuData.timestamp)/1e9 << " sec";
            if(last_imu_ts!=0)
                timestamp << std::fixed << std::setprecision(1) << " [" << 1e9/static_cast<float>(imuData.timestamp-last_imu_ts) << " Hz]";
 
            accel << std::fixed << std::showpos << std::setprecision(4) << " * Accel: " << imuData.aX << " " << imuData.aY << " " << imuData.aZ << " [m/s^2]";
            gyro << std::fixed << std::showpos << std::setprecision(4) << " * Gyro: " << imuData.gX << " " << imuData.gY << " " << imuData.gZ << " [deg/s]";
            
 
            
 
 
            
            // Protect the shared strings and the sync timestamp while the main thread reads them
            imuMutex.lock();
            imuTsStr = timestamp.str();
            imuAccelStr = accel.str();
            imuGyroStr = gyro.str();

            // Keep the timestamp of the IMU sample associated with a video frame:
            // the main thread uses it to compute the video/sensors timestamp offset
            if(imuData.sync)
            {
                mcu_sync_ts = imuData.timestamp;
            }
            imuMutex.unlock();

            // Store the timestamp to estimate the IMU data rate at the next iteration
            last_imu_ts = imuData.timestamp;
            
 
        }
        
    }
}