ORB-SLAM2
System.h
#ifndef SYSTEM_H
#define SYSTEM_H

#include <string>
#include <thread>
#include <opencv2/core/core.hpp>

#include "Tracking.h"
#include "FrameDrawer.h"
#include "MapDrawer.h"
#include "Map.h"
#include "LocalMapping.h"
#include "LoopClosing.h"
#include "KeyFrameDatabase.h"
#include "ORBVocabulary.h"
#include "Viewer.h"

namespace ORB_SLAM2
{

class Viewer;
class FrameDrawer;
class Map;
class Tracking;
class LocalMapping;
class LoopClosing;

class System
{
public:
    // Input sensor
    enum eSensor{
        MONOCULAR=0,
        STEREO=1,
        RGBD=2
    };

public:

    // Initialize the SLAM system. It launches the Local Mapping, Loop Closing and Viewer threads.
    System(const string &strVocFile, const string &strSettingsFile, const eSensor sensor, const bool bUseViewer = true);

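    // Usage sketch (illustrative, not part of the original API documentation; the
    // vocabulary and settings paths below are placeholders):
    //
    //     ORB_SLAM2::System SLAM("path/to/ORBvoc.txt", "path/to/settings.yaml",
    //                            ORB_SLAM2::System::MONOCULAR, true);
    //
    // The constructor loads the ORB vocabulary and starts the Local Mapping, Loop
    // Closing and (if bUseViewer is true) Viewer threads before returning.
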
    // Process the given stereo frame. Images must be synchronized and rectified.
    // Input images: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp);

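    // Stereo usage sketch (illustrative; imLeft, imRight and timestamp are assumed to
    // come from the caller and to be synchronized and rectified already):
    //
    //     cv::Mat Tcw = SLAM.TrackStereo(imLeft, imRight, timestamp);
    //     if(Tcw.empty())
    //     {
    //         // tracking failed for this frame
    //     }
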
    // Process the given RGB-D frame. Depthmap must be registered to the RGB frame.
    // Input image: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Input depthmap: Float (CV_32F).
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, const double &timestamp);

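    // RGB-D usage sketch (illustrative). Many datasets store depth as 16-bit integers,
    // so it may need converting to CV_32F first; the scale factor below (5000, the
    // value used by the TUM RGB-D dataset) is dataset-dependent:
    //
    //     cv::Mat imDepthFloat;
    //     imDepth.convertTo(imDepthFloat, CV_32F, 1.0/5000.0);
    //     cv::Mat Tcw = SLAM.TrackRGBD(imRGB, imDepthFloat, timestamp);
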
    // Process the given monocular frame.
    // Input image: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackMonocular(const cv::Mat &im, const double &timestamp);

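    // Monocular usage sketch (illustrative; vstrImageFilenames and vTimestamps are
    // placeholders for whatever image source and clock the caller uses):
    //
    //     for(size_t i = 0; i < vstrImageFilenames.size(); i++)
    //     {
    //         cv::Mat im = cv::imread(vstrImageFilenames[i], cv::IMREAD_UNCHANGED);
    //         SLAM.TrackMonocular(im, vTimestamps[i]);
    //     }
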
    // This stops the Local Mapping thread (map building) and performs only camera tracking.
    void ActivateLocalizationMode();
    // This resumes the Local Mapping thread and performs SLAM again.
    void DeactivateLocalizationMode();

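    // Localization-only usage sketch (illustrative): once a map exists, frames can be
    // tracked against it without extending it:
    //
    //     SLAM.ActivateLocalizationMode();    // map building paused, tracking only
    //     // ... keep feeding frames with TrackMonocular/TrackStereo/TrackRGBD ...
    //     SLAM.DeactivateLocalizationMode();  // resume full SLAM
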
    // Returns true if there has been a big map change (loop closure, global BA)
    // since the last call to this function.
    bool MapChanged();

    // Reset the system (clear map)
    void Reset();

    // All threads will be requested to finish.
    // It waits until all threads have finished.
    // This function must be called before saving the trajectory.
    void Shutdown();

    // Save camera trajectory in the TUM RGB-D dataset format.
    // Only for stereo and RGB-D. This method does not work for monocular.
    // Call Shutdown() first.
    // See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
    void SaveTrajectoryTUM(const string &filename);

    // Save keyframe poses in the TUM RGB-D dataset format.
    // This method works for all sensor inputs.
    // Call Shutdown() first.
    // See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
    void SaveKeyFrameTrajectoryTUM(const string &filename);

    // Save camera trajectory in the KITTI dataset format.
    // Only for stereo and RGB-D. This method does not work for monocular.
    // Call Shutdown() first.
    // See format details at: http://www.cvlibs.net/datasets/kitti/eval_odometry.php
    void SaveTrajectoryKITTI(const string &filename);

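    // Shutdown/save usage sketch (illustrative; output filenames are placeholders).
    // The same ordering applies to all three Save* methods above:
    //
    //     SLAM.Shutdown();   // stop all threads before saving
    //     SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");   // any sensor
    //     // SLAM.SaveTrajectoryTUM("CameraTrajectory.txt");          // stereo/RGB-D only
    //     // SLAM.SaveTrajectoryKITTI("CameraTrajectory_KITTI.txt");  // stereo/RGB-D only
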
    // TODO: Save/Load functions
    // SaveMap(const string &filename);
    // LoadMap(const string &filename);

    // Information from the most recent processed frame.
    // You can call this right after TrackMonocular (or TrackStereo / TrackRGBD).
    int GetTrackingState();
    std::vector<MapPoint*> GetTrackedMapPoints();
    std::vector<cv::KeyPoint> GetTrackedKeyPointsUn();

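    // Per-frame introspection sketch (illustrative): query the tracking state and the
    // map points / undistorted keypoints tracked in the last frame right after a
    // Track* call. The returned integer corresponds to the tracking-state enum defined
    // in Tracking.h:
    //
    //     cv::Mat Tcw = SLAM.TrackMonocular(im, timestamp);
    //     int state = SLAM.GetTrackingState();
    //     std::vector<ORB_SLAM2::MapPoint*> vpMPs = SLAM.GetTrackedMapPoints();
    //     std::vector<cv::KeyPoint> vKeysUn = SLAM.GetTrackedKeyPointsUn();
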
private:

    // Input sensor
    eSensor mSensor;

    // ORB vocabulary used for place recognition and feature matching.
    ORBVocabulary* mpVocabulary;

    // KeyFrame database for place recognition (relocalization and loop detection).
    KeyFrameDatabase* mpKeyFrameDatabase;

    // Map structure that stores the pointers to all KeyFrames and MapPoints.
    Map* mpMap;

    // Tracker. It receives a frame and computes the associated camera pose.
    // It also decides when to insert a new keyframe and create new MapPoints,
    // and performs relocalization if tracking fails.
    Tracking* mpTracker;

    // Local Mapper. It manages the local map and performs local bundle adjustment.
    LocalMapping* mpLocalMapper;

    // Loop Closer. It searches loops with every new keyframe. If there is a loop it performs
    // a pose graph optimization and full bundle adjustment (in a new thread) afterwards.
    LoopClosing* mpLoopCloser;

    // The viewer draws the map and the current camera pose. It uses Pangolin.
    Viewer* mpViewer;

    FrameDrawer* mpFrameDrawer;
    MapDrawer* mpMapDrawer;

    // System threads: Local Mapping, Loop Closing, Viewer.
    // The Tracking thread "lives" in the main execution thread that creates the System object.
    std::thread* mptLocalMapping;
    std::thread* mptLoopClosing;
    std::thread* mptViewer;

    // Reset flag
    std::mutex mMutexReset;
    bool mbReset;

    // Change mode flags
    std::mutex mMutexMode;
    bool mbActivateLocalizationMode;
    bool mbDeactivateLocalizationMode;

    // Tracking state
    int mTrackingState;
    std::vector<MapPoint*> mTrackedMapPoints;
    std::vector<cv::KeyPoint> mTrackedKeyPointsUn;
    std::mutex mMutexState;
};

} // namespace ORB_SLAM2

#endif // SYSTEM_H