Example of how to use the VideoCapture class to get and rectify raw video frames, downloading calibration parameters from the Stereolabs servers, and then use OpenCV and the T-API to extract the disparity map and the depth map, and finally generate the RGB point cloud.
#include <iostream>
#include <sstream>
#include <string>
#include <opencv2/opencv.hpp>
#ifdef HAVE_OPENCV_VIZ
#include <opencv2/viz.hpp>
#include <opencv2/viz/viz3d.hpp>
#endif
#define USE_OCV_TAPI
#define USE_HALF_SIZE_DISP
int main(
int argc,
char *argv[])
{
(void)argc;
(void)argv;
#ifdef EMBEDDED_ARM
#else
#endif
{
std::cerr << "Cannot open camera video capture" << std::endl;
std::cerr << "See verbosity level for more details." << std::endl;
return EXIT_FAILURE;
}
std::cout << "Connected to camera sn: " << sn << std::endl;
std::string calibration_file;
unsigned int serial_number = sn;
{
std::cerr << "Could not load calibration file from Stereolabs servers" << std::endl;
return EXIT_FAILURE;
}
std::cout << "Calibration file found. Loading..." << std::endl;
int w,h;
cv::Mat map_left_x, map_left_y;
cv::Mat map_right_x, map_right_y;
cv::Mat cameraMatrix_left, cameraMatrix_right;
double baseline=0;
cameraMatrix_left, cameraMatrix_right, &baseline);
double fx = cameraMatrix_left.at<double>(0,0);
double fy = cameraMatrix_left.at<double>(1,1);
double cx = cameraMatrix_left.at<double>(0,2);
double cy = cameraMatrix_left.at<double>(1,2);
std::cout << " Camera Matrix L: \n" << cameraMatrix_left << std::endl << std::endl;
std::cout << " Camera Matrix R: \n" << cameraMatrix_right << std::endl << std::endl;
#ifdef USE_OCV_TAPI
cv::UMat map_left_x_gpu = map_left_x.getUMat(cv::ACCESS_READ,cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat map_left_y_gpu = map_left_y.getUMat(cv::ACCESS_READ,cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat map_right_x_gpu = map_right_x.getUMat(cv::ACCESS_READ,cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat map_right_y_gpu = map_right_y.getUMat(cv::ACCESS_READ,cv::USAGE_ALLOCATE_DEVICE_MEMORY);
#endif
#ifdef USE_OCV_TAPI
cv::UMat
frameBGR(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat
left_raw(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat
right_raw(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat
left_rect(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat
right_rect(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat left_disp_half(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat
left_disp(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat left_disp_float(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat left_disp_image(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat left_depth_map(cv::USAGE_ALLOCATE_DEVICE_MEMORY);
#else
cv::Mat
frameBGR,
left_raw,
left_rect,
right_raw,
right_rect,
frameYUV,
left_for_matcher,
right_for_matcher, left_disp_half,
left_disp,left_disp_float,
left_disp_vis;
#endif
{
}
cv::Mat cloudMat;
#ifdef HAVE_OPENCV_VIZ
cv::viz::Viz3d pc_viewer = cv::viz::Viz3d( "Point Cloud" );
#endif
uint64_t last_ts=0;
while (1)
{
{
#ifdef USE_OCV_TAPI
cv::Mat frameYUV_cpu = cv::Mat( frame.
height, frame.
width, CV_8UC2, frame.
data );
frameYUV = frameYUV_cpu.getUMat(cv::ACCESS_READ,cv::USAGE_ALLOCATE_HOST_MEMORY);
#else
#endif
#ifdef USE_OCV_TAPI
#else
#endif
double remap_elapsed = remap_clock.
toc();
std::stringstream remapElabInfo;
remapElabInfo << "Rectif. processing: " << remap_elapsed << " sec - Freq: " << 1./remap_elapsed;
double resize_fact = 1.0;
#ifdef USE_HALF_SIZE_DISP
resize_fact = 0.5;
#else
#endif
left_disp_half.convertTo(left_disp_float,CV_32FC1);
cv::multiply(left_disp_float,1./16.,left_disp_float);
#ifdef USE_HALF_SIZE_DISP
cv::multiply(left_disp_float,2.,left_disp_float);
cv::UMat tmp = left_disp_float;
cv::resize(tmp, left_disp_float, cv::Size(), 1./resize_fact, 1./resize_fact, cv::INTER_AREA);
#else
#endif
double elapsed = stereo_clock.
toc();
std::stringstream stereoElabInfo;
stereoElabInfo << "Stereo processing: " << elapsed << " sec - Freq: " << 1./elapsed;
cv::applyColorMap(left_disp_image,left_disp_image,cv::COLORMAP_JET);
double num = static_cast<double>(fx*baseline);
cv::divide(num,left_disp_float,left_depth_map);
float central_depth = left_depth_map.getMat(cv::ACCESS_READ).at<float>(left_depth_map.rows/2, left_depth_map.cols/2 );
std::cout << "Depth of the central pixel: " << central_depth << " mm" << std::endl;
size_t buf_size = static_cast<size_t>(left_depth_map.cols * left_depth_map.rows);
std::vector<cv::Vec3d> buffer( buf_size, cv::Vec3f::all( std::numeric_limits<float>::quiet_NaN() ) );
cv::Mat depth_map_cpu = left_depth_map.getMat(cv::ACCESS_READ);
float* depth_vec = (float*)(&(depth_map_cpu.data[0]));
#pragma omp parallel for
for(size_t idx=0; idx<buf_size;idx++ )
{
size_t r = idx/left_depth_map.cols;
size_t c = idx%left_depth_map.cols;
double depth = static_cast<double>(depth_vec[idx]);
{
buffer[idx].val[2] = depth;
buffer[idx].val[0] = (c-cx)*depth/fx;
buffer[idx].val[1] = (r-cy)*depth/fy;
}
}
cloudMat = cv::Mat( left_depth_map.rows, left_depth_map.cols, CV_64FC3, &buffer[0] ).clone();
double pc_elapsed = stereo_clock.
toc();
std::stringstream pcElabInfo;
}
int key = cv::waitKey( 5 );
if(key=='q' || key=='Q')
break;
#ifdef HAVE_OPENCV_VIZ
cv::viz::WCloud cloudWidget( cloudMat,
left_rect );
cloudWidget.setRenderingProperty( cv::viz::POINT_SIZE, 1 );
pc_viewer.showWidget( "Point Cloud", cloudWidget );
pc_viewer.spinOnce(1);
if(pc_viewer.wasStopped())
break;
#endif
}
return EXIT_SUCCESS;
}
The VideoCapture class provides image grabbing functions and settings control for all the Stereolabs ...
const Frame & getLastFrame(uint64_t timeout_msec=100)
Get the last received camera image.
void getFrameSize(int &width, int &height)
Get the size of the camera frame.
bool initializeVideo(int devId=-1)
Open a ZED camera using the specified ID or searching for the first available.
int getSerialNumber()
Retrieve the serial number of the connected camera.
@ FPS_30
30 Frames per second. Not available for RESOLUTION::HD2K.
The Frame struct containing the acquired video frames.
uint64_t timestamp
Timestamp in nanoseconds.
uint16_t height
Frame height.
uint16_t width
Frame width.
uint8_t * data
Frame data in YUV 4:2:2 format.
The camera configuration parameters.
RESOLUTION res
Camera resolution.
FPS fps
Frames per second.
int main(int argc, char *argv[])
sl_oc::tools::StereoSgbmPar stereoPar
sl_oc::video::VideoParams params
cv::Mat right_for_matcher
cv::Ptr< cv::StereoSGBM > left_matcher