from matplotlib import pyplot as plt
import numpy as np
import scipy as sp

from parseData import MAX_RANGE_CLIP_DEFAULT, RANGE_RESOLUTION_CART_M, convertPolarImageToCartesian
from trajectoryPlotting import Trajectory
# import m2dp
from getPointCloud import getPointCloudPolarInd
from utils import getRotationMatrix
from motionDistortion import MotionDistortionSolver

# Thresholds
ROT_THRESHOLD = 0.2  # radians
TRANS_THRESHOLD = 2.0  # meters
TRANS_THRESHOLD_SQ = TRANS_THRESHOLD * TRANS_THRESHOLD  # meters^2

RADAR_CART_CENTER = None
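
# A new keyframe is accepted once the pose has rotated by at least ROT_THRESHOLD or
# translated by at least TRANS_THRESHOLD relative to the previous keyframe (see
# Map.isGoodKeyframe() below); the squared translation threshold lets that check compare
# squared distances and skip a sqrt. RADAR_CART_CENTER is filled in lazily by the first
# Keyframe, since the Cartesian image size is only known once an image has been converted.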


# Keyframe class
class Keyframe():

    def __init__(self, globalPose: np.ndarray, featurePointsLocal: np.ndarray,
                 radarPolarImg: np.ndarray, velocity: np.ndarray) -> None:
        '''
        @brief Keyframe class. Contains pose, feature points and point cloud information
        @param[in] globalPose (3 x 1) Pose information [x, y, th] in global coordinates,
                              units of [m, m, rad] # TODO: Confirm these units
        @param[in] featurePointsLocal (K x 2) Tracked feature points from previous keyframe,
                                      in local coordinates (pixels)
        @param[in] radarPolarImg (M x N) Radar polar (range-azimuth) image
        @param[in] velocity Velocity estimate used to undistort the feature points
                            (passed to MotionDistortionSolver.undistort())

        @see updateInfo()
        '''
        self.updateInfo(globalPose, featurePointsLocal, radarPolarImg, velocity)

    def updateInfo(self, globalPose: np.ndarray,
                   featurePointsLocal: np.ndarray,
                   radarPolarImg: np.ndarray,
                   velocity: np.ndarray) -> None:
        '''
        @brief Update internal information: pose, feature points and point cloud information
        @param[in] globalPose (3 x 1) Pose information [x, y, th] in global coordinates,
                              units of [m, m, rad] # TODO: Confirm these units
        @param[in] featurePointsLocal (K x 2) Tracked feature points from previous keyframe,
                                      in local coordinates (pixels)
        @param[in] radarPolarImg (M x N) Radar polar (range-azimuth) image
        @param[in] velocity Velocity estimate used to undistort the feature points
                            (passed to MotionDistortionSolver.undistort())
        '''
        self.pose = globalPose
        self.radarPolarImg = radarPolarImg  # radar polar image

        # Figure out and cache the center of the Cartesian image, needed for converting coordinates
        global RADAR_CART_CENTER
        if RADAR_CART_CENTER is None:
            radarCartImg = convertPolarImageToCartesian(radarPolarImg)
            RADAR_CART_CENTER = np.array(radarCartImg.shape) / 2

        self.featurePointsLocal = featurePointsLocal  # set of original feature points, in local (px) coordinates
        self.prunedFeaturePoints = self.featurePointsLocal  # set of pruned feature points (tracked until the next keyframe)

        # TODO: Not sure if needed/useful
        self.pointCloud = getPointCloudPolarInd(radarPolarImg)

        self.velocity = velocity
        self.featurePointsLocalUndistorted = MotionDistortionSolver.undistort(velocity, featurePointsLocal)[:, :2]
        self.prunedUndistortedLocals = self.featurePointsLocalUndistorted

    def copyFromOtherKeyframe(self, keyframe) -> None:
        self.updateInfo(keyframe.pose, keyframe.featurePointsLocal,
                        keyframe.radarPolarImg, keyframe.velocity)

    def convertFeaturesLocalToGlobal(
            self, featurePointsLocal: np.ndarray) -> np.ndarray:
        '''
        @brief Convert local feature points into global coordinates, given the internal pose
        @param[in] featurePointsLocal (K x 2) Tracked feature points from previous keyframe,
                                      in local coordinates (pixels)
        @return featurePointsGlobal (K x 2) Feature points in global coordinates, meters
        '''
        x, y, th = self.pose

        # First translate local points to origin at center
        featurePointsGlobal = featurePointsLocal - RADAR_CART_CENTER

        # Then we need to convert to meters
        featurePointsGlobal *= RANGE_RESOLUTION_CART_M  # px * (m/px) = m

        # Rotate and translate into the global coordinate system
        R = getRotationMatrix(th)
        t = np.array([x, y]).reshape(2, 1)
        featurePointsGlobal = (R @ (featurePointsGlobal.T) + t).T

        return featurePointsGlobal
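
    # Worked example of the conversion above (hypothetical numbers, shown as comments only):
    # for a keyframe with pose [10.0, 5.0, pi/2], a feature lying exactly at the Cartesian
    # image center has zero pixel offset, so after scaling by RANGE_RESOLUTION_CART_M and
    # applying R(th) the result is just the translation, i.e. global position (10.0, 5.0).
    # A feature 100 px from the center ends up 100 * RANGE_RESOLUTION_CART_M meters away
    # from the keyframe position, with the offset rotated by th into the global frame.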

    def getPrunedFeaturesGlobalPosition(self) -> np.ndarray:
        '''
        @brief Get global position of pruned features (stored internally)
        @return Global position of pruned features (K x 2)
        '''
        x, y, th = self.pose

        # The undistorted pruned points are assumed to already be centered and in meters
        # (see MotionDistortionSolver.undistort()); rotate and translate them into the
        # global coordinate system
        featurePointsGlobal = self.prunedUndistortedLocals
        R = getRotationMatrix(th)
        t = np.array([x, y]).reshape(2, 1)
        featurePointsGlobal = (R @ (featurePointsGlobal.T) + t).T

        return featurePointsGlobal

    def pruneFeaturePoints(self, corrStatus: np.ndarray) -> None:
        '''
        @brief Prune feature points based on correspondence status
        @param[in] corrStatus Correspondence status for each of the K tracked feature points
                              (flattened internally); nonzero entries mark points that are
                              still successfully tracked
        @note Modifies `self.prunedFeaturePoints` and `self.prunedUndistortedLocals` in place,
              keeping only the feature points from this keyframe that are still being tracked
        '''
        keep = corrStatus.flatten().astype(bool)
        self.prunedFeaturePoints = self.prunedFeaturePoints[keep]
        self.prunedUndistortedLocals = self.prunedUndistortedLocals[keep]
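

# Usage sketch for Keyframe pruning (illustrative only; `kf`, `trackedStatus` and the
# arguments below are hypothetical placeholders, not names used elsewhere in this file):
#
#   kf = Keyframe(globalPose, featurePointsLocal, radarPolarImg, velocity)
#   trackedStatus = np.array([[1], [0], [1]])   # e.g. a KLT-style per-feature status array
#   kf.pruneFeaturePoints(trackedStatus)
#   # kf.prunedFeaturePoints and kf.prunedUndistortedLocals now keep rows 0 and 2 only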


# Map class
class Map():

    def __init__(self, sequenceName: str, estTraj: Trajectory,
                 imgPathArr: list[str], filePaths: dict[str]) -> None:
        self.sequenceName = sequenceName
        self.imgPathArr = imgPathArr
        self.sequenceSize = len(self.imgPathArr)
        self.filePaths = filePaths
        self.estTraj = estTraj

        # TODO: Instead of storing set of all keyframes, only store the last keyframe,
        #       and then a big array of map points in global coordinates
        # should be a large np.array of global feature points
        self.mapPoints = []
        self.keyframes = []

    def updateInternalTraj(self, traj: Trajectory):
        self.estTraj = traj

    def isGoodKeyframe(self, keyframe: Keyframe) -> bool:
        '''
        @brief Check if a keyframe is good for adding, using information about relative rotation and translation
        @return Whether the keyframe passes the checks
        '''
        # Get information of prev KF's pose
        prevKF = self.keyframes[-1]
        srcPose = prevKF.pose

        # Get the information of possible KF's pose
        targetPose = keyframe.pose

        # Check rotation condition relative to last keyframe
        deltaTh = np.abs(srcPose[2] - targetPose[2])
        if deltaTh >= ROT_THRESHOLD:
            return True

        # Check translation condition
        deltaTrans = (srcPose[0:2] - targetPose[0:2])**2
        deltaTrans = deltaTrans.sum()
        if deltaTrans >= TRANS_THRESHOLD_SQ:
            return True

        return False
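
    # Worked example (hypothetical poses, shown as comments only): with the previous keyframe
    # at [0, 0, 0] and a candidate at [1.5, 1.0, 0.1], deltaTh = 0.1 < ROT_THRESHOLD (0.2) and
    # deltaTrans = 1.5**2 + 1.0**2 = 3.25 < TRANS_THRESHOLD_SQ (4.0), so the candidate is
    # rejected; a candidate at [2.0, 1.0, 0.1] gives deltaTrans = 5.0 >= 4.0 and is accepted.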

    def addKeyframe(self, keyframe: Keyframe) -> None:
        '''
        @brief Add a keyframe to the running pose graph
        @param[in] keyframe Keyframe to add
        '''
        self.keyframes.append(keyframe)

    def plot(self, fig: plt.Figure, subsampleFactor: int = 5, show: bool = False) -> None:
        '''
        @brief Plot map points on plt figure
        @param[in] fig plt figure to plot on @todo Currently unused
        @param[in] subsampleFactor Subsampling amount for feature point plotting.
                                   Controls density of plotted points: higher = less dense
        @param[in] show Whether to plt.show()
        '''
        # TODO: For now, plot all the keyframe feature points
        points_global = np.empty((0, 2))
        for kf in self.keyframes:
            points_global = np.vstack((points_global, kf.getPrunedFeaturesGlobalPosition()))

        plt.scatter(points_global[::subsampleFactor, 0],
                    points_global[::subsampleFactor, 1],
                    marker='+',
                    color='g',
                    alpha=.8,
                    label='Map Points')

        if show:
            plt.show()

        return
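

# Minimal end-to-end usage sketch (illustrative only; `sequenceName`, `estTraj`, `imgPathArr`,
# `filePaths`, and the keyframe objects below are placeholders built elsewhere in the pipeline,
# not values defined in this file):
#
#   mapping = Map(sequenceName, estTraj, imgPathArr, filePaths)
#   mapping.addKeyframe(firstKeyframe)             # seed the map with an initial keyframe
#   if mapping.isGoodKeyframe(candidateKeyframe):  # enough rotation/translation since last KF
#       mapping.addKeyframe(candidateKeyframe)
#   fig = plt.figure()
#   mapping.plot(fig, subsampleFactor=10, show=True)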