@@ -42,6 +42,8 @@ def __init__(self, name = "SearchPlanner_gridmap", params:dict = None, GPS_ping:
42
42
43
43
self .tf_buffer = Buffer ()
44
44
self .tf_listener = TransformListener (self .tf_buffer , self )
45
+ self .drone_position = PointStamped ()
46
+ self .sam_vel = Vector3Stamped ()
45
47
46
48
if params :
47
49
@@ -54,11 +56,12 @@ def __init__(self, name = "SearchPlanner_gridmap", params:dict = None, GPS_ping:
54
56
self .beta = params ["grid_map.update.true_detection_rate" ]
55
57
self .time_margin = params ["grid_map.update.time_margin" ]* 1e9 #nanoseconds
56
58
57
- self .camera_fov = (pi / 180 )* params ["camera_fov" ]
58
- self .flight_height = params ["flight_height" ]
59
+ self .camera_fov = (pi / 180 )* params ["drone. camera_fov" ]
60
+ self .flight_height = params ["drone. flight_height" ]
59
61
60
- self .drone_map_frame_id = params ['frames.id.quadrotor_map ' ]
62
+ self .map_frame_id = params ['frames.id.map ' ]
61
63
self .drone_odom_frame_id = params ['frames.id.quadrotor_odom' ]
64
+ self .sam_odom_frame_id = params ['frames.id.sam_odom' ]
62
65
63
66
else :
64
67
self .get_logger ().error ("No parameters received" )
@@ -86,8 +89,7 @@ def initiate_grid_map(self) -> None:
86
89
"""
87
90
88
91
# create grid map without probabilities (just its dimensions) as class attributes and ros msg.
89
- self .GPS_ping_odom = self .transform_point (self .GPS_ping )
90
- self .get_logger ().info (f'Received GPS ping in odom (x,y) = { round (float (self .GPS_ping_odom [0 ]),2 ), round (float (self .GPS_ping_odom [1 ]),2 )} ' )
92
+ self .GPS_ping_odom , _ = self .transform_point (self .GPS_ping )
91
93
self .createMap (self .GPS_ping_odom )
92
94
93
95
# gaussian prior, scale probabilities to fit int -> viewing purposes only
@@ -132,18 +134,8 @@ def kernel(self):
132
134
The kernel is a 3*3 matrix with bigger values on the elements that are aligned with SAM's velocity direction.
133
135
Eg: if SAM's velocity = (0,1), then the top-central element of the kernel will be significantly high
134
136
"""
135
- # Transform sam velocity to odom frame #TODO: check frame of SAM's Odometry
136
- t = self .tf_buffer .lookup_transform (
137
- target_frame = self .drone_odom_frame_id ,
138
- source_frame = self .drone_map_frame_id ,
139
- time = rclpy .time .Time ())
140
- vec = Vector3Stamped ()
141
- vec .header .frame_id = self .drone_map_frame_id
142
- vec .header .stamp = self .get_clock ().now ().to_msg ()
143
- vec .vector .x = self .sam_vel [0 ]
144
- vec .vector .y = self .sam_vel [1 ]
145
- result = tf2_geometry_msgs .do_transform_vector3 (vec , t )
146
- sam_vel_odom = [result .vector .x , result .vector .y ]
137
+
138
+ sam_vel_odom = self .transform_vector (self .sam_vel )
147
139
148
140
# Compute weights by mapping SAM's velocity to closest base vector and retrieving most appropriate kernel
149
141
(q , Q ) = self .compute_kernel_coeff (sam_vel = np .linalg .norm (sam_vel_odom ),
@@ -244,32 +236,33 @@ def find_cells2update(self):
244
236
the "projected radius" (which depends on height of flight and camer FOV).
245
237
246
238
"""
247
- if self .drone_pos_odom_gt is not None :
248
- x_cells , y_cells = [], []
249
- x , y = self .drone_pos_odom_gt .pose .position .x , self .drone_pos_odom_gt .pose .position .y
250
- detection_radius = tan (self .camera_fov / 2 )* self .drone_pos_odom_gt .pose .position .z
251
- xc_cell , yc_cell = self .find_cell (x_coord = x , y_coord = y ) # drone's current cell
252
-
253
- # create a mask around current cell to avoid iterate over full workspace
254
- encirclements = int (detection_radius / self .resol )+ 1
255
- base_mask = np .ix_ (np .arange (yc_cell - encirclements ,yc_cell + encirclements + 1 ,1 ), np .arange (xc_cell - encirclements ,xc_cell + encirclements + 1 ,1 ))
256
-
257
- # remove cells that lie outside the workspace
258
- base_mask_y_filtered = np .extract ((0 <= base_mask [0 ]) & (base_mask [0 ] <= self .Ncells_y - 1 ), base_mask [0 ])
259
- base_mask_x_filtered = np .extract ((0 <= base_mask [1 ]) & (base_mask [1 ] <= self .Ncells_x - 1 ), base_mask [1 ])
260
- base_mask_filtered = (base_mask_y_filtered .reshape (base_mask_y_filtered .shape [0 ],1 ), base_mask_x_filtered )
261
-
262
- # apply distance and time criteria
263
- sub_X , sub_Y , sub_Time = self .X [base_mask_filtered ], self .Y [base_mask_filtered ], self .map_Time [base_mask_filtered ]
264
- condition_mask = (
265
- ((np .power (sub_X - x , 2 ) + np .power (sub_Y - y , 2 )) < detection_radius ** 2 )
266
- & ((self .get_clock ().now ().nanoseconds ) - sub_Time > self .time_margin )
267
- )
268
- condition_rows , condition_cols = np .where (condition_mask )
269
- rows = condition_rows + yc_cell - encirclements
270
- columns = condition_cols + xc_cell - encirclements
271
-
272
- return rows , columns
239
+
240
+ x_cells , y_cells = [], []
241
+ drone_pos_odom , height = self .transform_point (self .drone_position )
242
+ x , y = drone_pos_odom [0 ], drone_pos_odom [1 ]
243
+ detection_radius = tan (self .camera_fov / 2 )* height
244
+ xc_cell , yc_cell = self .find_cell (x_coord = x , y_coord = y ) # drone's current cell
245
+
246
+ # create a mask around current cell to avoid iterate over full workspace
247
+ encirclements = int (detection_radius / self .resol )+ 1
248
+ base_mask = np .ix_ (np .arange (yc_cell - encirclements ,yc_cell + encirclements + 1 ,1 ), np .arange (xc_cell - encirclements ,xc_cell + encirclements + 1 ,1 ))
249
+
250
+ # remove cells that lie outside the workspace
251
+ base_mask_y_filtered = np .extract ((0 <= base_mask [0 ]) & (base_mask [0 ] <= self .Ncells_y - 1 ), base_mask [0 ])
252
+ base_mask_x_filtered = np .extract ((0 <= base_mask [1 ]) & (base_mask [1 ] <= self .Ncells_x - 1 ), base_mask [1 ])
253
+ base_mask_filtered = (base_mask_y_filtered .reshape (base_mask_y_filtered .shape [0 ],1 ), base_mask_x_filtered )
254
+
255
+ # apply distance and time criteria
256
+ sub_X , sub_Y , sub_Time = self .X [base_mask_filtered ], self .Y [base_mask_filtered ], self .map_Time [base_mask_filtered ]
257
+ condition_mask = (
258
+ ((np .power (sub_X - x , 2 ) + np .power (sub_Y - y , 2 )) < detection_radius ** 2 )
259
+ & ((self .get_clock ().now ().nanoseconds ) - sub_Time > self .time_margin )
260
+ )
261
+ condition_rows , condition_cols = np .where (condition_mask )
262
+ rows = condition_rows + yc_cell - encirclements
263
+ columns = condition_cols + xc_cell - encirclements
264
+
265
+ return rows , columns
273
266
274
267
def find_cell (self , x_coord , y_coord ):
275
268
""" Maps (x,y) coordinates to cell coordinates """
@@ -302,15 +295,18 @@ def transform_point(self, point:PointStamped) -> np.array:
302
295
target_frame = self .drone_odom_frame_id ,
303
296
source_frame = point .header .frame_id ,
304
297
time = rclpy .time .Time () )
305
- # goal = PointStamped()
306
- # goal.header.stamp = t.header.stamp
307
- # goal.header.frame_id = self.drone_map_frame_id
308
- # goal.point.x = point[0]
309
- # goal.point.y = point[1]
310
- # goal.point.z = self.flight_height
311
298
new_point = tf2_geometry_msgs .do_transform_point (point , t )
312
- self .get_logger ().info (f"GPS ping in quadrotor's odom = { (round (float (new_point .point .x ),2 ), round (float (new_point .point .y ),2 ))} " )
313
- return np .array ([new_point .point .x , new_point .point .y ])
299
+ return np .array ([new_point .point .x , new_point .point .y ]), new_point .point .z
300
+
301
def transform_vector(self, vector: "Vector3Stamped") -> np.ndarray:
    """Transform a stamped vector into the drone's odom frame.

    The source frame is taken from ``vector.header.frame_id`` (not
    necessarily the map frame), and the target frame is
    ``self.drone_odom_frame_id``.

    Args:
        vector: geometry_msgs Vector3Stamped to transform.

    Returns:
        np.ndarray of shape (2,) with the transformed (x, y) components;
        z is discarded because the grid map is planar.

    Raises:
        tf2 lookup / connectivity / extrapolation exceptions when the
        transform between the two frames is not (yet) available.
    """
    # Time() (zero) asks tf2 for the latest available transform.
    transform = self.tf_buffer.lookup_transform(
        target_frame=self.drone_odom_frame_id,
        source_frame=vector.header.frame_id,
        time=rclpy.time.Time())
    transformed = tf2_geometry_msgs.do_transform_vector3(vector, transform)
    return np.array([transformed.vector.x, transformed.vector.y])
314
310
315
311
def initiatePrior (self , GPS_ping_odom :np .array ):
316
312
""" Initiate Gaussian prior in the odom_gt frame"""
@@ -365,25 +361,16 @@ def map_probabilities(self, data):
365
361
slope = (new_max - new_min )/ (max - min )
366
362
return np .int8 (slope * (data - max ) + new_max )
367
363
368
-
369
- def drone_odom_callback (self , msg ):
370
- """ Retrieve drone position (in map_gt) and tranform to init_drone_frame and to Quadrotor/odom_gt (for occupancy grid)"""
371
-
372
- # transform to /Quadrotor/odom_gt
373
- t = self .tf_buffer .lookup_transform (
374
- target_frame = self .drone_odom_frame_id ,
375
- source_frame = self .drone_map_frame_id ,
376
- time = rclpy .time .Time (),
377
- timeout = Duration (seconds = 1 ))
378
- pose_map = PoseStamped ()
379
- pose_map .header .stamp = t .header .stamp
380
- pose_map .header .frame_id = msg .header .frame_id # should be map_gt
381
- pose_map .pose = msg .pose .pose
382
- self .drone_pos_odom_gt = tf2_geometry_msgs .do_transform_pose_stamped (pose_map , t )
383
-
384
364
385
def sam_odom_callback(self, msg: "Odometry"):
    """Cache SAM's linear velocity from its Odometry message.

    Copies the twist's linear components and the message header into
    ``self.sam_vel`` (a Vector3Stamped), so the velocity can later be
    transformed into the odom frame.

    NOTE(review): a nav_msgs/Odometry twist is expressed in
    ``msg.child_frame_id``, not ``msg.header.frame_id`` — confirm that the
    header copied here names the frame this velocity is actually in.

    Args:
        msg: incoming nav_msgs Odometry message for SAM.
    """
    self.sam_vel.vector.x = msg.twist.twist.linear.x
    self.sam_vel.vector.y = msg.twist.twist.linear.y
    self.sam_vel.vector.z = msg.twist.twist.linear.z
    self.sam_vel.header = msg.header
371
+
372
def drone_odom_callback(self, msg: "Odometry"):
    """Cache the drone's current position from its Odometry message.

    Copies the pose position and the message header into
    ``self.drone_position`` (a PointStamped) for later transformation
    into the odom frame.

    NOTE(review): ``self.drone_position.point`` aliases the Point object
    inside the incoming message rather than copying it — fine as long as
    messages are never mutated after delivery; verify against the
    middleware's message-loaning behavior.

    Args:
        msg: incoming nav_msgs Odometry message for the drone.
    """
    self.drone_position.header = msg.header
    self.drone_position.point = msg.pose.pose.position
389
376
0 commit comments