mono_realsense_D435i.cc

/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#include <signal.h>
#include <stdlib.h>
#include <iostream>
#include <algorithm>
#include <fstream>
#include <chrono>
#include <cmath>
#include <ctime>
#include <sstream>
#include <mutex>
#include <condition_variable>
#include <vector>

#include <opencv2/core/core.hpp>

#include <librealsense2/rs.hpp>
#include "librealsense2/rsutil.h"

#include <System.h>

using namespace std;
bool b_continue_session;

void exit_loop_handler(int s){
    cout << "Finishing session" << endl;
    b_continue_session = false;
}

rs2_vector interpolateMeasure(const double target_time,
                              const rs2_vector current_data, const double current_time,
                              const rs2_vector prev_data, const double prev_time);
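
// The definition of interpolateMeasure is not shown in this excerpt. For IMU
// samples, a plain linear interpolation between the previous and current
// measurements would look roughly like the sketch below. This is illustrative
// only (interpolateMeasureSketch is a hypothetical name chosen to avoid
// clashing with the real definition), not necessarily the project's own code.
static rs2_vector interpolateMeasureSketch(const double target_time,
                                           const rs2_vector current_data, const double current_time,
                                           const rs2_vector prev_data, const double prev_time)
{
    // No usable previous sample, or target outside the interval: propagate the nearest sample
    if(prev_time == 0 || current_time <= prev_time || target_time >= current_time)
        return current_data;
    if(target_time <= prev_time)
        return prev_data;

    // Linear blend: value = prev + alpha * (current - prev), with alpha in (0,1)
    const float alpha = static_cast<float>((target_time - prev_time) / (current_time - prev_time));
    rs2_vector value;
    value.x = prev_data.x + alpha * (current_data.x - prev_data.x);
    value.y = prev_data.y + alpha * (current_data.y - prev_data.y);
    value.z = prev_data.z + alpha * (current_data.z - prev_data.z);
    return value;
}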
static rs2_option get_sensor_option(const rs2::sensor& sensor)
{
    // Sensors usually have several options to control their properties
    // such as Exposure, Brightness etc.
    std::cout << "Sensor supports the following options:\n" << std::endl;

    // The following loop shows how to iterate over all available options
    // Starting from 0 until RS2_OPTION_COUNT (exclusive)
    for (int i = 0; i < static_cast<int>(RS2_OPTION_COUNT); i++)
    {
        rs2_option option_type = static_cast<rs2_option>(i);
        // SDK enum types can be streamed to get a string that represents them
        std::cout << "  " << i << ": " << option_type;

        // To control an option, use the following api:

        // First, verify that the sensor actually supports this option
        if (sensor.supports(option_type))
        {
            std::cout << std::endl;

            // Get a human readable description of the option
            const char* description = sensor.get_option_description(option_type);
            std::cout << "       Description   : " << description << std::endl;

            // Get the current value of the option
            float current_value = sensor.get_option(option_type);
            std::cout << "       Current Value : " << current_value << std::endl;

            // To change the value of an option, please follow the change_sensor_option() function
        }
        else
        {
            std::cout << " is not supported" << std::endl;
        }
    }

    uint32_t selected_sensor_option = 0;
    return static_cast<rs2_option>(selected_sensor_option);
}
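
// change_sensor_option() is referenced in the comment above but is not part of
// this file; it comes from the librealsense API how-to examples. A minimal
// sketch of how a writable option could be changed might look like the helper
// below (illustrative only; change_sensor_option_sketch is a hypothetical name).
static void change_sensor_option_sketch(rs2::sensor& sensor, rs2_option option_type, float requested_value)
{
    // First, verify that the sensor actually supports this option
    if (!sensor.supports(option_type))
    {
        std::cerr << "This option is not supported by this sensor" << std::endl;
        return;
    }

    // Each option exposes an rs2::option_range describing its valid limits
    rs2::option_range range = sensor.get_option_range(option_type);
    if (requested_value < range.min || requested_value > range.max)
    {
        std::cerr << "Requested value is out of range [" << range.min << ", " << range.max << "]" << std::endl;
        return;
    }

    // Set the option to the new value; set_option can throw (e.g. some options
    // can only be changed while streaming), so catch rs2::error
    try
    {
        sensor.set_option(option_type, requested_value);
    }
    catch (const rs2::error& e)
    {
        std::cerr << "Failed to set option " << option_type << ". (" << e.what() << ")" << std::endl;
    }
}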
int main(int argc, char **argv) {

    if (argc < 3 || argc > 4) {
        cerr << endl
             << "Usage: ./mono_realsense_D435i path_to_vocabulary path_to_settings (trajectory_file_name)"
             << endl;
        return 1;
    }
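
    // For example (assuming the standard ORB-SLAM3 repository layout; adjust the
    // paths to your own checkout):
    //   ./Examples/Monocular/mono_realsense_D435i Vocabulary/ORBvoc.txt \
    //       Examples/Monocular/RealSense_D435i.yaml my_trajectory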
    string file_name;

    if (argc == 4) {
        file_name = string(argv[argc - 1]);
    }

    struct sigaction sigIntHandler;

    sigIntHandler.sa_handler = exit_loop_handler;
    sigemptyset(&sigIntHandler.sa_mask);
    sigIntHandler.sa_flags = 0;

    sigaction(SIGINT, &sigIntHandler, NULL);
    b_continue_session = true;

    double offset = 0; // ms

    rs2::context ctx;
    rs2::device_list devices = ctx.query_devices();
    rs2::device selected_device;
    if (devices.size() == 0)
    {
        std::cerr << "No device connected, please connect a RealSense device" << std::endl;
        return 1;
    }
    else
        selected_device = devices[0];
    std::vector<rs2::sensor> sensors = selected_device.query_sensors();
    int index = 0;
    // We can now iterate the sensors and print their names
    for (rs2::sensor sensor : sensors)
        if (sensor.supports(RS2_CAMERA_INFO_NAME)) {
            ++index;
            if (index == 1) {
                sensor.set_option(RS2_OPTION_ENABLE_AUTO_EXPOSURE, 1);
                sensor.set_option(RS2_OPTION_AUTO_EXPOSURE_LIMIT, 5000);
                sensor.set_option(RS2_OPTION_EMITTER_ENABLED, 0); // switch off emitter
            }
            // std::cout << "  " << index << " : " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
            get_sensor_option(sensor);
            if (index == 2){
                // RGB camera (not used here...)
                sensor.set_option(RS2_OPTION_EXPOSURE, 100.f);
            }
        }
    // Declare RealSense pipeline, encapsulating the actual device and sensors
    rs2::pipeline pipe;
    // Create a configuration for configuring the pipeline with a non default profile
    rs2::config cfg;
    cfg.enable_stream(RS2_STREAM_INFRARED, 1, 640, 480, RS2_FORMAT_Y8, 30);
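    // Only the left infrared stream is enabled here. If other streams were needed,
    // they could be requested on the same config before pipe.start(), for example
    // (not used in this monocular example):
    //   cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGB8, 30);
    //   cfg.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F);
    //   cfg.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F);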
    // Frame callback state (the callback below is named imu_callback, but in this
    // monocular example it only handles image framesets)
    std::mutex imu_mutex;
    std::condition_variable cond_image_rec;

    cv::Mat imCV;
    int width_img, height_img;
    double timestamp_image = -1.0;
    bool image_ready = false;
    int count_im_buffer = 0; // count dropped frames
    auto imu_callback = [&](const rs2::frame& frame)
    {
        std::unique_lock<std::mutex> lock(imu_mutex);

        if(rs2::frameset fs = frame.as<rs2::frameset>())
        {
            count_im_buffer++;

            double new_timestamp_image = fs.get_timestamp()*1e-3;
            if(std::abs(timestamp_image - new_timestamp_image) < 0.001){
                // cout << "Two frames with the same timeStamp!!!\n";
                count_im_buffer--;
                return;
            }

            rs2::video_frame ir_frameL = fs.get_infrared_frame(1);
            imCV = cv::Mat(cv::Size(width_img, height_img), CV_8U, (void*)(ir_frameL.get_data()), cv::Mat::AUTO_STEP);

            timestamp_image = fs.get_timestamp()*1e-3;
            image_ready = true;

            lock.unlock();
            cond_image_rec.notify_all();
        }
    };
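
    // Note: imCV above only wraps the librealsense frame buffer; the pixels are
    // copied later in the main loop via imCV.clone(). If the buffer could be
    // recycled before that copy happens, a deep copy inside the callback would be
    // the safer variant (illustrative alternative, not what this example does):
    //   imCV = cv::Mat(cv::Size(width_img, height_img), CV_8U,
    //                  (void*)(ir_frameL.get_data()), cv::Mat::AUTO_STEP).clone();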
    rs2::pipeline_profile pipe_profile = pipe.start(cfg, imu_callback);

    rs2::stream_profile cam_left = pipe_profile.get_stream(RS2_STREAM_INFRARED, 1);

    rs2_intrinsics intrinsics_left = cam_left.as<rs2::video_stream_profile>().get_intrinsics();
    width_img = intrinsics_left.width;
    height_img = intrinsics_left.height;
    cout << "Left camera: \n";
    std::cout << " fx = " << intrinsics_left.fx << std::endl;
    std::cout << " fy = " << intrinsics_left.fy << std::endl;
    std::cout << " cx = " << intrinsics_left.ppx << std::endl;
    std::cout << " cy = " << intrinsics_left.ppy << std::endl;
    std::cout << " height = " << intrinsics_left.height << std::endl;
    std::cout << " width = " << intrinsics_left.width << std::endl;
    std::cout << " Coeff = " << intrinsics_left.coeffs[0] << ", " << intrinsics_left.coeffs[1] << ", " <<
                 intrinsics_left.coeffs[2] << ", " << intrinsics_left.coeffs[3] << ", " << intrinsics_left.coeffs[4] << std::endl;
    std::cout << " Model = " << intrinsics_left.model << std::endl;
    // Create SLAM system. It initializes all system threads and gets ready to process frames.
    ORB_SLAM3::System SLAM(argv[1], argv[2], ORB_SLAM3::System::MONOCULAR, true, 0, file_name);
    float imageScale = SLAM.GetImageScale();

    double timestamp;
    cv::Mat im;

    double t_resize = 0.0;
    double t_track = 0.0;
    while (!SLAM.isShutDown())
    {
        std::vector<rs2_vector> vGyro;
        std::vector<double> vGyro_times;
        std::vector<rs2_vector> vAccel;
        std::vector<double> vAccel_times;

        {
            // Wait until the callback has delivered a new image
            std::unique_lock<std::mutex> lk(imu_mutex);
            if(!image_ready)
                cond_image_rec.wait(lk);

#ifdef COMPILEDWITHC14
            std::chrono::steady_clock::time_point time_Start_Process = std::chrono::steady_clock::now();
#else
            std::chrono::monotonic_clock::time_point time_Start_Process = std::chrono::monotonic_clock::now();
#endif

            if(count_im_buffer > 1)
                cout << count_im_buffer - 1 << " dropped frames\n";
            count_im_buffer = 0;

            timestamp = timestamp_image;
            im = imCV.clone();

            image_ready = false;
        }
        if(imageScale != 1.f)
        {
#ifdef REGISTER_TIMES
    #ifdef COMPILEDWITHC14
            std::chrono::steady_clock::time_point t_Start_Resize = std::chrono::steady_clock::now();
    #else
            std::chrono::monotonic_clock::time_point t_Start_Resize = std::chrono::monotonic_clock::now();
    #endif
#endif
            int width = im.cols * imageScale;
            int height = im.rows * imageScale;
            cv::resize(im, im, cv::Size(width, height));
#ifdef REGISTER_TIMES
    #ifdef COMPILEDWITHC14
            std::chrono::steady_clock::time_point t_End_Resize = std::chrono::steady_clock::now();
    #else
            std::chrono::monotonic_clock::time_point t_End_Resize = std::chrono::monotonic_clock::now();
    #endif
            t_resize = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t_End_Resize - t_Start_Resize).count();
            SLAM.InsertResizeTime(t_resize);
#endif
        }
#ifdef REGISTER_TIMES
    #ifdef COMPILEDWITHC14
        std::chrono::steady_clock::time_point t_Start_Track = std::chrono::steady_clock::now();
    #else
        std::chrono::monotonic_clock::time_point t_Start_Track = std::chrono::monotonic_clock::now();
    #endif
#endif

        // The infrared image is already rectified by the device; pass it to the SLAM system.
        SLAM.TrackMonocular(im, timestamp);

#ifdef REGISTER_TIMES
    #ifdef COMPILEDWITHC14
        std::chrono::steady_clock::time_point t_End_Track = std::chrono::steady_clock::now();
    #else
        std::chrono::monotonic_clock::time_point t_End_Track = std::chrono::monotonic_clock::now();
    #endif
        t_track = t_resize + std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t_End_Track - t_Start_Track).count();
        SLAM.InsertTrackTime(t_track);
#endif
    }
    cout << "System shutdown!\n";
}