FrameDrawer.cc

/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/

#include "FrameDrawer.h"
#include "Tracking.h"

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <mutex>

namespace ORB_SLAM3
{

FrameDrawer::FrameDrawer(Atlas* pAtlas): both(false), mpAtlas(pAtlas)
{
    mState = Tracking::SYSTEM_NOT_READY;
    mIm = cv::Mat(480, 640, CV_8UC3, cv::Scalar(0, 0, 0));
    mImRight = cv::Mat(480, 640, CV_8UC3, cv::Scalar(0, 0, 0));
}
cv::Mat FrameDrawer::DrawFrame(float imageScale)
{
    cv::Mat im;
    vector<cv::KeyPoint> vIniKeys;     // Initialization: KeyPoints in reference frame
    vector<int> vMatches;              // Initialization: correspondences with reference keypoints
    vector<cv::KeyPoint> vCurrentKeys; // KeyPoints in current frame
    vector<bool> vbVO, vbMap;          // Tracked MapPoints in current frame
    vector<pair<cv::Point2f, cv::Point2f> > vTracks;
    int state;                         // Tracking state
    vector<float> vCurrentDepth;
    float thDepth;

    Frame currentFrame;
    vector<MapPoint*> vpLocalMap;
    vector<cv::KeyPoint> vMatchesKeys;
    vector<MapPoint*> vpMatchedMPs;
    vector<cv::KeyPoint> vOutlierKeys;
    vector<MapPoint*> vpOutlierMPs;
    map<long unsigned int, cv::Point2f> mProjectPoints;
    map<long unsigned int, cv::Point2f> mMatchedInImage;

    cv::Scalar standardColor(0, 255, 0);
    cv::Scalar odometryColor(255, 0, 0);

    // Copy variables within scoped mutex
    {
        unique_lock<mutex> lock(mMutex);
        state = mState;
        if(mState == Tracking::SYSTEM_NOT_READY)
            mState = Tracking::NO_IMAGES_YET;

        mIm.copyTo(im);

        if(mState == Tracking::NOT_INITIALIZED)
        {
            vCurrentKeys = mvCurrentKeys;
            vIniKeys = mvIniKeys;
            vMatches = mvIniMatches;
            vTracks = mvTracks;
        }
        else if(mState == Tracking::OK)
        {
            vCurrentKeys = mvCurrentKeys;
            vbVO = mvbVO;
            vbMap = mvbMap;

            currentFrame = mCurrentFrame;
            vpLocalMap = mvpLocalMap;
            vMatchesKeys = mvMatchedKeys;
            vpMatchedMPs = mvpMatchedMPs;
            vOutlierKeys = mvOutlierKeys;
            vpOutlierMPs = mvpOutlierMPs;
            mProjectPoints = mmProjectPoints;
            mMatchedInImage = mmMatchedInImage;

            vCurrentDepth = mvCurrentDepth;
            thDepth = mThDepth;
        }
        else if(mState == Tracking::LOST)
        {
            vCurrentKeys = mvCurrentKeys;
        }
    }

    if(imageScale != 1.f)
    {
        int imWidth = im.cols / imageScale;
        int imHeight = im.rows / imageScale;
        cv::resize(im, im, cv::Size(imWidth, imHeight));
    }

    if(im.channels() < 3) // this should always be true
        cvtColor(im, im, cv::COLOR_GRAY2BGR);

    // Draw
    if(state == Tracking::NOT_INITIALIZED)
    {
        for(unsigned int i=0; i<vMatches.size(); i++)
        {
            if(vMatches[i] >= 0)
            {
                cv::Point2f pt1, pt2;
                if(imageScale != 1.f)
                {
                    pt1 = vIniKeys[i].pt / imageScale;
                    pt2 = vCurrentKeys[vMatches[i]].pt / imageScale;
                }
                else
                {
                    pt1 = vIniKeys[i].pt;
                    pt2 = vCurrentKeys[vMatches[i]].pt;
                }
                cv::line(im, pt1, pt2, standardColor);
            }
        }
        for(vector<pair<cv::Point2f, cv::Point2f> >::iterator it=vTracks.begin(); it!=vTracks.end(); it++)
        {
            cv::Point2f pt1, pt2;
            if(imageScale != 1.f)
            {
                pt1 = (*it).first / imageScale;
                pt2 = (*it).second / imageScale;
            }
            else
            {
                pt1 = (*it).first;
                pt2 = (*it).second;
            }
            cv::line(im, pt1, pt2, standardColor, 5);
        }
    }
    else if(state == Tracking::OK) // TRACKING
    {
        mnTracked = 0;
        mnTrackedVO = 0;
        const float r = 5;
        int n = vCurrentKeys.size();
        for(int i=0; i<n; i++)
        {
            if(vbVO[i] || vbMap[i])
            {
                cv::Point2f pt1, pt2;
                cv::Point2f point;
                if(imageScale != 1.f)
                {
                    point = vCurrentKeys[i].pt / imageScale;
                    float px = vCurrentKeys[i].pt.x / imageScale;
                    float py = vCurrentKeys[i].pt.y / imageScale;
                    pt1.x = px - r;
                    pt1.y = py - r;
                    pt2.x = px + r;
                    pt2.y = py + r;
                }
                else
                {
                    point = vCurrentKeys[i].pt;
                    pt1.x = vCurrentKeys[i].pt.x - r;
                    pt1.y = vCurrentKeys[i].pt.y - r;
                    pt2.x = vCurrentKeys[i].pt.x + r;
                    pt2.y = vCurrentKeys[i].pt.y + r;
                }

                // This is a match to a MapPoint in the map
                if(vbMap[i])
                {
                    cv::rectangle(im, pt1, pt2, standardColor);
                    cv::circle(im, point, 2, standardColor, -1);
                    mnTracked++;
                }
                else // This is a match to a "visual odometry" MapPoint created in the last frame
                {
                    cv::rectangle(im, pt1, pt2, odometryColor);
                    cv::circle(im, point, 2, odometryColor, -1);
                    mnTrackedVO++;
                }
            }
        }
    }

    cv::Mat imWithInfo;
    DrawTextInfo(im, state, imWithInfo);

    return imWithInfo;
}
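
/*
 * Reading the overlay built above: in NOT_INITIALIZED, lines join reference and current
 * keypoints for the candidate initialization matches; in OK, every tracked keypoint gets
 * a 2r x 2r box (r = 5) plus a filled dot, green (BGR 0,255,0) for matches to map points
 * and blue (BGR 255,0,0) for "visual odometry" points seen only in the last frame.
 *
 * A hedged usage sketch for the viewer side; pFrameDrawer, the window title and the
 * refresh period are illustrative assumptions, not names taken from this file:
 *
 *     float imageScale = 2.f;  // values > 1 shrink the drawn image by that factor
 *     cv::Mat toShow = pFrameDrawer->DrawFrame(imageScale);
 *     cv::imshow("ORB-SLAM3: Current Frame", toShow);
 *     cv::waitKey(30);
 */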
cv::Mat FrameDrawer::DrawRightFrame(float imageScale)
{
    cv::Mat im;
    vector<cv::KeyPoint> vIniKeys;     // Initialization: KeyPoints in reference frame
    vector<int> vMatches;              // Initialization: correspondences with reference keypoints
    vector<cv::KeyPoint> vCurrentKeys; // KeyPoints in current frame
    vector<bool> vbVO, vbMap;          // Tracked MapPoints in current frame
    int state;                         // Tracking state

    // Copy variables within scoped mutex
    {
        unique_lock<mutex> lock(mMutex);
        state = mState;
        if(mState == Tracking::SYSTEM_NOT_READY)
            mState = Tracking::NO_IMAGES_YET;

        mImRight.copyTo(im);

        if(mState == Tracking::NOT_INITIALIZED)
        {
            vCurrentKeys = mvCurrentKeysRight;
            vIniKeys = mvIniKeys;
            vMatches = mvIniMatches;
        }
        else if(mState == Tracking::OK)
        {
            vCurrentKeys = mvCurrentKeysRight;
            vbVO = mvbVO;
            vbMap = mvbMap;
        }
        else if(mState == Tracking::LOST)
        {
            vCurrentKeys = mvCurrentKeysRight;
        }
    } // destroy scoped mutex -> release mutex

    if(imageScale != 1.f)
    {
        int imWidth = im.cols / imageScale;
        int imHeight = im.rows / imageScale;
        cv::resize(im, im, cv::Size(imWidth, imHeight));
    }

    if(im.channels() < 3) // this should always be true
        cvtColor(im, im, cv::COLOR_GRAY2BGR);

    // Draw
    if(state == Tracking::NOT_INITIALIZED) // INITIALIZING
    {
        for(unsigned int i=0; i<vMatches.size(); i++)
        {
            if(vMatches[i] >= 0)
            {
                cv::Point2f pt1, pt2;
                if(imageScale != 1.f)
                {
                    pt1 = vIniKeys[i].pt / imageScale;
                    pt2 = vCurrentKeys[vMatches[i]].pt / imageScale;
                }
                else
                {
                    pt1 = vIniKeys[i].pt;
                    pt2 = vCurrentKeys[vMatches[i]].pt;
                }
                cv::line(im, pt1, pt2, cv::Scalar(0, 255, 0));
            }
        }
    }
    else if(state == Tracking::OK) // TRACKING
    {
        mnTracked = 0;
        mnTrackedVO = 0;
        const float r = 5;
        const int n = mvCurrentKeysRight.size();
        const int Nleft = mvCurrentKeys.size();

        for(int i=0; i<n; i++)
        {
            if(vbVO[i + Nleft] || vbMap[i + Nleft])
            {
                cv::Point2f pt1, pt2;
                cv::Point2f point;
                if(imageScale != 1.f)
                {
                    point = mvCurrentKeysRight[i].pt / imageScale;
                    float px = mvCurrentKeysRight[i].pt.x / imageScale;
                    float py = mvCurrentKeysRight[i].pt.y / imageScale;
                    pt1.x = px - r;
                    pt1.y = py - r;
                    pt2.x = px + r;
                    pt2.y = py + r;
                }
                else
                {
                    point = mvCurrentKeysRight[i].pt;
                    pt1.x = mvCurrentKeysRight[i].pt.x - r;
                    pt1.y = mvCurrentKeysRight[i].pt.y - r;
                    pt2.x = mvCurrentKeysRight[i].pt.x + r;
                    pt2.y = mvCurrentKeysRight[i].pt.y + r;
                }

                // This is a match to a MapPoint in the map
                if(vbMap[i + Nleft])
                {
                    cv::rectangle(im, pt1, pt2, cv::Scalar(0, 255, 0));
                    cv::circle(im, point, 2, cv::Scalar(0, 255, 0), -1);
                    mnTracked++;
                }
                else // This is a match to a "visual odometry" MapPoint created in the last frame
                {
                    cv::rectangle(im, pt1, pt2, cv::Scalar(255, 0, 0));
                    cv::circle(im, point, 2, cv::Scalar(255, 0, 0), -1);
                    mnTrackedVO++;
                }
            }
        }
    }

    cv::Mat imWithInfo;
    DrawTextInfo(im, state, imWithInfo);

    return imWithInfo;
}
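
/*
 * Note on the indexing above (inferred from how Update() sizes the flag vectors): when
 * both images are published, mvbVO / mvbMap store left and right keypoints back to back,
 *
 *     [0 .. Nleft-1]        flags for mvCurrentKeys       (left image)
 *     [Nleft .. Nleft+n-1]  flags for mvCurrentKeysRight  (right image)
 *
 * so the flags of right keypoint i live at vbVO[i + Nleft] / vbMap[i + Nleft].
 */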
void FrameDrawer::DrawTextInfo(cv::Mat &im, int nState, cv::Mat &imText)
{
    stringstream s;
    if(nState == Tracking::NO_IMAGES_YET)
        s << " WAITING FOR IMAGES";
    else if(nState == Tracking::NOT_INITIALIZED)
        s << " TRYING TO INITIALIZE ";
    else if(nState == Tracking::OK)
    {
        if(!mbOnlyTracking)
            s << "SLAM MODE | ";
        else
            s << "LOCALIZATION | ";
        int nMaps = mpAtlas->CountMaps();
        int nKFs = mpAtlas->KeyFramesInMap();
        int nMPs = mpAtlas->MapPointsInMap();
        s << "Maps: " << nMaps << ", KFs: " << nKFs << ", MPs: " << nMPs << ", Matches: " << mnTracked;
        if(mnTrackedVO > 0)
            s << ", + VO matches: " << mnTrackedVO;
    }
    else if(nState == Tracking::LOST)
    {
        s << " TRACK LOST. TRYING TO RELOCALIZE ";
    }
    else if(nState == Tracking::SYSTEM_NOT_READY)
    {
        s << " LOADING ORB VOCABULARY. PLEASE WAIT...";
    }

    int baseline = 0;
    cv::Size textSize = cv::getTextSize(s.str(), cv::FONT_HERSHEY_PLAIN, 1, 1, &baseline);

    imText = cv::Mat(im.rows + textSize.height + 10, im.cols, im.type());
    im.copyTo(imText.rowRange(0, im.rows).colRange(0, im.cols));
    imText.rowRange(im.rows, imText.rows) = cv::Mat::zeros(textSize.height + 10, im.cols, im.type());
    cv::putText(imText, s.str(), cv::Point(5, imText.rows - 5), cv::FONT_HERSHEY_PLAIN, 1, cv::Scalar(255, 255, 255), 1, 8);
}
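
/*
 * Minimal standalone sketch of the status-bar layout composed above, using the same
 * OpenCV calls on an arbitrary 8-bit image; AppendStatusBar is a hypothetical helper,
 * not part of ORB-SLAM3:
 *
 *     #include <opencv2/imgproc.hpp>
 *     #include <string>
 *
 *     cv::Mat AppendStatusBar(const cv::Mat &im, const std::string &status)
 *     {
 *         int baseline = 0;
 *         cv::Size textSize = cv::getTextSize(status, cv::FONT_HERSHEY_PLAIN, 1, 1, &baseline);
 *         // New canvas: the input image on top, a (textSize.height + 10)-pixel black strip below.
 *         cv::Mat out(im.rows + textSize.height + 10, im.cols, im.type(), cv::Scalar::all(0));
 *         im.copyTo(out.rowRange(0, im.rows));
 *         cv::putText(out, status, cv::Point(5, out.rows - 5),
 *                     cv::FONT_HERSHEY_PLAIN, 1, cv::Scalar(255, 255, 255), 1, 8);
 *         return out;
 *     }
 */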
void FrameDrawer::Update(Tracking *pTracker)
{
    unique_lock<mutex> lock(mMutex);
    pTracker->mImGray.copyTo(mIm);
    mvCurrentKeys = pTracker->mCurrentFrame.mvKeys;
    mThDepth = pTracker->mCurrentFrame.mThDepth;
    mvCurrentDepth = pTracker->mCurrentFrame.mvDepth;

    if(both){
        mvCurrentKeysRight = pTracker->mCurrentFrame.mvKeysRight;
        pTracker->mImRight.copyTo(mImRight);
        N = mvCurrentKeys.size() + mvCurrentKeysRight.size();
    }
    else{
        N = mvCurrentKeys.size();
    }

    mvbVO = vector<bool>(N, false);
    mvbMap = vector<bool>(N, false);
    mbOnlyTracking = pTracker->mbOnlyTracking;

    // Variables for the new visualization
    mCurrentFrame = pTracker->mCurrentFrame;
    mmProjectPoints = mCurrentFrame.mmProjectPoints;
    mmMatchedInImage.clear();

    mvpLocalMap = pTracker->GetLocalMapMPS();
    mvMatchedKeys.clear();
    mvMatchedKeys.reserve(N);
    mvpMatchedMPs.clear();
    mvpMatchedMPs.reserve(N);
    mvOutlierKeys.clear();
    mvOutlierKeys.reserve(N);
    mvpOutlierMPs.clear();
    mvpOutlierMPs.reserve(N);

    if(pTracker->mLastProcessedState == Tracking::NOT_INITIALIZED)
    {
        mvIniKeys = pTracker->mInitialFrame.mvKeys;
        mvIniMatches = pTracker->mvIniMatches;
    }
    else if(pTracker->mLastProcessedState == Tracking::OK)
    {
        for(int i=0; i<N; i++)
        {
            MapPoint* pMP = pTracker->mCurrentFrame.mvpMapPoints[i];
            if(pMP)
            {
                if(!pTracker->mCurrentFrame.mvbOutlier[i])
                {
                    if(pMP->Observations() > 0)
                        mvbMap[i] = true;
                    else
                        mvbVO[i] = true;

                    mmMatchedInImage[pMP->mnId] = mvCurrentKeys[i].pt;
                }
                else
                {
                    mvpOutlierMPs.push_back(pMP);
                    mvOutlierKeys.push_back(mvCurrentKeys[i]);
                }
            }
        }
    }
    mState = static_cast<int>(pTracker->mLastProcessedState);
}

} // namespace ORB_SLAM3
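
/*
 * Sketch of the intended call pattern, inferred from the locking above rather than taken
 * from this file: Update() is the producer and is expected to run in the tracking thread
 * after each frame is processed, while DrawFrame()/DrawRightFrame() are the consumers
 * running in the viewer thread at its own rate; mMutex keeps each published snapshot
 * consistent. mpFrameDrawer and bothImages are assumed names for illustration:
 *
 *     // tracking thread, once per processed frame:
 *     mpFrameDrawer->Update(this);   // 'this' is the Tracking object
 *
 *     // viewer thread, in its display loop:
 *     cv::Mat im = mpFrameDrawer->DrawFrame(1.f);
 *     if(bothImages)
 *     {
 *         cv::Mat imRight = mpFrameDrawer->DrawRightFrame(1.f);
 *     }
 */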