
Commit d73c9c9

Merge pull request #178 from KKalem/humble: Alars search
2 parents: fad5d52 + 3637c98

10 files changed: +364, -228 lines

Lines changed: 6 additions & 3 deletions

@@ -1,7 +1,10 @@
-#request
-geometry_msgs/PointStamped gps
+#request
+geographic_msgs/GeoPoint gps
+
+float64 radius
+
+float64 initial_altitude
 
-geometry_msgs/PointStamped quadrotor_ipos
 ---
 #response
 bool success
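
The first file in the diff is not named on this page, but judging from the README entry below it is the InitAUVSearch service definition. After this change the request carries the GPS ping as a geographic_msgs/GeoPoint plus a search radius and the drone's initial altitude; reconstructed from the diff above, the file now reads:

#request
geographic_msgs/GeoPoint gps

float64 radius

float64 initial_altitude

---
#response
bool success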

path_planning/alars_auv_search_planner/README.md

Lines changed: 1 addition & 1 deletion

@@ -33,7 +33,7 @@ If the user wants to integrate this in a broader task, two clients have to be cr
 ## **(ROS) Srv**
 | Service name | Service Type | Description |
 | --- | ---| ---|
-| 'init_auv_search' | InitAUVSearch | It initiates the **PGM**. It should be requested when the drone is +or- within the desired workspace and search planning is about to initiate. It needs the GPS ping and the initial drone position in any frame connected to Quadrotor's odom|
+| 'init_auv_search' | InitAUVSearch | It initiates the **PGM**. It should be requested when the search planning is about to initiate. It needs the search radius around the GPS ping, the GPS ping (GeoPoint msg) and the drone's initial altitude wrt to the water level. It can be called several times in a single simulation since the planner and the grid map are reinitialized when this service is requested|
 | 'get_quadrotor_path' | DronePath | It computes a path based on the **PGM** and returns it as a PoseArray |
 ---
 Check *test_initmap_srv.py* and *test_getpath_srv.py* to see how the client can be set up.
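
The test scripts referenced above are not part of this diff, so the following is only a minimal sketch of how an 'init_auv_search' client could be set up against the new request fields. The import path alars_auv_search_interfaces is a placeholder (the package that actually exports InitAUVSearch is not shown in this commit), and the numeric values are illustrative.

import rclpy
from rclpy.node import Node
from geographic_msgs.msg import GeoPoint
from alars_auv_search_interfaces.srv import InitAUVSearch  # placeholder package name, not from this commit

class InitSearchClient(Node):
    def __init__(self):
        super().__init__('init_search_client')
        self.cli = self.create_client(InitAUVSearch, 'init_auv_search')
        while not self.cli.wait_for_service(timeout_sec=1.0):
            self.get_logger().info('waiting for init_auv_search...')

    def send_request(self):
        req = InitAUVSearch.Request()
        req.gps = GeoPoint(latitude=58.25, longitude=11.45, altitude=0.0)  # GPS ping of the target (example values)
        req.radius = 50.0              # search radius around the ping, in metres (assumed unit)
        req.initial_altitude = 10.0    # drone altitude w.r.t. the water level (assumed unit)
        return self.cli.call_async(req)

def main():
    rclpy.init()
    node = InitSearchClient()
    future = node.send_request()
    rclpy.spin_until_future_complete(node, future)
    node.get_logger().info(f'init_auv_search success = {future.result().success}')
    node.destroy_node()
    rclpy.shutdown()

if __name__ == '__main__':
    main()

Since the grid map and planner are reinitialized on every request, the same client can be reused to restart the search within a single simulation.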

path_planning/alars_auv_search_planner/alars_auv_search_planner/path_planners.py

Lines changed: 49 additions & 28 deletions

@@ -35,6 +35,7 @@
 import numpy as np
 from math import cos, sin, pi, sqrt, tan, factorial, dist
 from .Astar import AStar
+from .prob_grid_map import ProbabilisticGridMap
 
 class InitializeActions(Node):
     """
@@ -65,21 +66,18 @@ def __init__(self, name = 'init_actions', params = None):
         if params:
             self.drone_init_xy = params["drone.init_pos"]
             self.sam_init_pos = params["sam.init_pos"]
-            self.sam_pos_var= params["sam.initial_state.pos_variance"]
-            self.flight_height = params["flight_height"]
-            self.drone_map_frame_id = params['frames.id.quadrotor_map']
+            self.sam_pos_var= params["sam.init_pos_variance"]
+            self.flight_height = params["drone.flight_height"]
+            self.map_frame_id = params['frames.id.map']
             self.drone_odom_frame_id = params['frames.id.quadrotor_odom']
+            self.sam_odom_frame_id = params['frames.id.sam_odom']
         else:
             self.get_logger().error("No valid parameters received in SearchPlanner node")
 
         self.teleport_sam_publisher = self.create_publisher(
             msg_type = PoseStamped,
             topic = '/sam_auv_v1/teleport',
             qos_profile= 10)
-        self.teleport_sam_publisher = self.create_publisher(
-            msg_type = PoseStamped,
-            topic = '/Quadrotor/teleport',
-            qos_profile= 10)
 
         self.sam_odom_callback = self.create_subscription(
             msg_type = Odometry,
@@ -105,7 +103,7 @@ def teleport_sam(self):
         in SAM's odom frame.
         """
         msg = PoseStamped()
-        msg.header.frame_id = 'sam_auv_v1/odom_gt'
+        msg.header.frame_id = self.drone_odom_frame_id # CHECK
         msg.header.stamp = self.get_clock().now().to_msg()
         msg.pose.position.x = self.sam_init_pos[0]
         msg.pose.position.y = self.sam_init_pos[1]
@@ -130,11 +128,11 @@ def get_GPSxy_ping(self) -> PointStamped :
         It's not a real GPS measurement.
         """
         if self.sam_pos is not None:
-            cov = [[self.sam_pos_var, 0], [0, self.sam_pos_var]]
+            cov = [[self.sam_pos_var, 0], [0, self.sam_pos_var]] #TODO: change to sam_pos
             X = np.random.multivariate_normal(self.sam_init_pos[0:2], cov)
             GPS_ping = PointStamped()
             GPS_ping.header.stamp = self.get_clock().now().to_msg()
-            GPS_ping.header.frame_id = self.drone_map_frame_id
+            GPS_ping.header.frame_id = self.map_frame_id
             GPS_ping.point.x = X[0]
             GPS_ping.point.y = X[1]
             return GPS_ping
@@ -176,7 +174,7 @@ class SearchPlanner(Node, ABC):
     Notes:
 
     """
-    def __init__(self, name="pathplanner_parent", params = None, grid_map = None):
+    def __init__(self, name="pathplanner_parent", params = None, grid_map:ProbabilisticGridMap = None):
         super().__init__(node_name = name)
         self.get_logger().info('Parent search planner initialized')
 
@@ -187,6 +185,7 @@ def __init__(self, name="pathplanner_parent", params = None, grid_map = None):
         self.battery_state = None
         self.drone_position = PointStamped()
         self.drone_vel = None
+        self.sam_position = PointStamped()
         self.sam_vel = None
         self.distance_thresh = 0.1
 
@@ -197,17 +196,18 @@ def __init__(self, name="pathplanner_parent", params = None, grid_map = None):
 
         if params:
             self.planner_type = params["path_planner"]
-            self.camera_fov = params["camera_fov"]
-            self.flight_height = params["flight_height"]
-            self.lat = params["look_ahead_time"]
-            self.intermediate_dt = params["intermediate_dt"]
+            self.camera_fov = params["drone.camera_fov"]
+            self.flight_height = params["drone.flight_height"]
+            self.lat = params["drone.look_ahead_time"]
+            self.intermediate_dt = params["drone.intermediate_dt"]
 
             self.battery_discharge_rate = params['battery.discharge_rate']
             self.battery_threshold = params['battery.threshold']
             self.equivalent_drone_vel = params['battery.equivalent_drone_vel']
 
-            self.drone_map_frame_id = params['frames.id.quadrotor_map']
+            self.map_frame_id = params['frames.id.map']
             self.drone_odom_frame_id = params['frames.id.quadrotor_odom']
+            self.sam_odom_frame_id = params['frames.id.sam_odom']
         else:
             self.get_logger().error("No valid parameters received in Path Model")
 
@@ -232,12 +232,16 @@ def __init__(self, name="pathplanner_parent", params = None, grid_map = None):
 
         self.point_publisher = self.create_publisher(
             msg_type = PoseStamped,
-            topic = '/Quadrotor/go_to_setpoint',
+            topic = params['topics.move_drone'],
             qos_profile= 10)
         self.path_publisher = self.create_publisher( # visualization purposes only
             msg_type = Path,
             topic = '/Quadrotor/path',
             qos_profile= 10)
+        self.sam_pos_publisher = self.create_publisher( # visualization purposes only
+            msg_type = PointStamped,
+            topic = '/sam_auv_v1/position',
+            qos_profile= 10)
 
 
     @abstractmethod
@@ -260,11 +264,12 @@ def generate_waypoint(self, x, y, z):
         current_pos_odom = self.transform_point(self.drone_position)
         return current_pos_odom.point.x, current_pos_odom.point.y
 
-    def transform_point(self, point:PointStamped) -> PointStamped:
+    def transform_point(self, point:PointStamped, frame:str = None) -> PointStamped:
+        if frame is None: frame = self.drone_odom_frame_id
         # transform desired position (relative position) to odom frame
         t = self.tf_buffer.lookup_transform(
-            target_frame = self.drone_odom_frame_id,
-            source_frame = point.header.frame_id, #point.header.frame_id, #self.drone_map_frame_id,
+            target_frame = frame,
+            source_frame = point.header.frame_id, #point.header.frame_id, #self.map_frame_id,
            time=rclpy.time.Time() )
         return tf2_geometry_msgs.do_transform_point(point, t)
 
@@ -303,7 +308,6 @@ def battery_ok(self, path) -> bool:
 
     def publish_path(self) -> None:
         """ Publish path for visualization in rviz """
-        self.get_logger().info(f'Path = {self.path}')
         path_msg = Path()
         path_msg.header.stamp = self.get_clock().now().to_msg()
         path_msg.header.frame_id = self.drone_odom_frame_id
@@ -362,18 +366,35 @@ def return_to_base(self):
         self.point_publisher.publish(pose_msg)
         odom_position = self.transform_point(self.drone_position)
         return odom_position.point.x, odom_position.point.y
+
+    def reinitialize_planner(self):
+        """
+        Reinitializes grid map and planner state (called when client makes requests a new service and
+        the search planning has to be reinitiated without destroying the node)
+        """
+        if self.params['path_planner'] == 'spiral': self.phase = 'line'
+        self.path_needed = True
+        self.path_completed = False
+        self.wait_finished = True
+        self.grid_map.initiate_grid_map()
+
 
 
-    def drone_odom_callback(self, msg):
-        """ Retrieve drone position (map_gt)"""
-        self.drone_position.point = msg.pose.pose.position #np.array([msg.pose.pose.position.x, msg.pose.pose.position.y])
+    def drone_odom_callback(self, msg: Odometry):
+        """ Retrieve drone position (currently odometry gives in map_gt)"""
+        self.drone_position.point = msg.pose.pose.position
         self.drone_position.header.stamp = self.get_clock().now().to_msg()
-        self.drone_position.header.frame_id = self.drone_map_frame_id #msg.header.frame_id -> check if i can use this one
+        self.drone_position.header.frame_id = msg.header.frame_id
         self.drone_vel = np.array([msg.twist.twist.linear.x, msg.twist.twist.linear.y])
 
-    def sam_odom_callback(self, msg):
-        """ Retrieve SAM's velocity from odometry"""
+    def sam_odom_callback(self, msg: Odometry):
+        """ Retrieve SAM's velocity from odometry and publish it's position for visualization purposes"""
         self.sam_vel = np.array([msg.twist.twist.linear.x, msg.twist.twist.linear.y])
+        self.sam_position.point = msg.pose.pose.position
+        self.sam_position.header = msg.header
+        self.sam_pos_publisher.publish(self.sam_position)
+
+
 
     def drone_battery_callback(self, msg):
         """ Retrieve current battery percentage"""
@@ -867,7 +888,7 @@ def generate_path(self) -> bool:
 
             # if mode = real, the next path is generated as soon service receives request. If mode = sim, we use distance feeback to
             # determine when to publish next path
-            if self.params['mode'] == 'real': self.path_needed = True
+            self.path_needed = True if self.params['mode'] == 'real' else False
 
 
         elif not self.path_needed and not self.path_completed:
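
A usage note on the reworked transform_point(): it now accepts an optional target frame and falls back to the quadrotor's odom frame, so the same helper can serve other frames (e.g. SAM's odom frame) instead of duplicating TF lookups. A rough sketch, where planner stands for any SearchPlanner subclass instance (not repo code):

# Default behaviour, unchanged: transform the drone position into the quadrotor's odom frame.
pos_odom = planner.transform_point(planner.drone_position)

# New: target any other TF frame by passing it explicitly, e.g. SAM's odom frame.
pos_sam_odom = planner.transform_point(planner.drone_position, frame=planner.sam_odom_frame_id)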

path_planning/alars_auv_search_planner/alars_auv_search_planner/prob_grid_map.py

Lines changed: 57 additions & 70 deletions

@@ -42,6 +42,8 @@ def __init__(self, name = "SearchPlanner_gridmap", params:dict = None, GPS_ping:
 
         self.tf_buffer = Buffer()
         self.tf_listener = TransformListener(self.tf_buffer, self)
+        self.drone_position = PointStamped()
+        self.sam_vel = Vector3Stamped()
 
         if params:
 
@@ -54,11 +56,12 @@ def __init__(self, name = "SearchPlanner_gridmap", params:dict = None, GPS_ping:
             self.beta = params["grid_map.update.true_detection_rate"]
             self.time_margin = params["grid_map.update.time_margin"]*1e9 #nanoseconds
 
-            self.camera_fov = (pi/180)*params["camera_fov"]
-            self.flight_height = params["flight_height"]
+            self.camera_fov = (pi/180)*params["drone.camera_fov"]
+            self.flight_height = params["drone.flight_height"]
 
-            self.drone_map_frame_id = params['frames.id.quadrotor_map']
+            self.map_frame_id = params['frames.id.map']
             self.drone_odom_frame_id = params['frames.id.quadrotor_odom']
+            self.sam_odom_frame_id = params['frames.id.sam_odom']
 
         else:
             self.get_logger().error("No parameters received")
@@ -86,8 +89,7 @@ def initiate_grid_map(self) -> None:
         """
 
         # create grid map without probabilities (just its dimensions) as class attributes and ros msg.
-        self.GPS_ping_odom = self.transform_point(self.GPS_ping)
-        self.get_logger().info(f'Received GPS ping in odom (x,y) = {round(float(self.GPS_ping_odom[0]),2), round(float(self.GPS_ping_odom[1]),2)}')
+        self.GPS_ping_odom, _ = self.transform_point(self.GPS_ping)
         self.createMap(self.GPS_ping_odom)
 
         # gaussian prior, scale probabilities to fit int -> viewing purposes only
@@ -132,18 +134,8 @@ def kernel(self):
         The kernel is a 3*3 matrix with bigger values on the elements that are aligned with SAM's velocity direction.
         Eg: if SAM's velocity = (0,1), then the top-central element of the kernel will be significantly high
         """
-        # Transform sam velocity to odom frame #TODO: check frame of SAM's Odometry
-        t = self.tf_buffer.lookup_transform(
-            target_frame=self.drone_odom_frame_id,
-            source_frame=self.drone_map_frame_id,
-            time=rclpy.time.Time())
-        vec = Vector3Stamped()
-        vec.header.frame_id = self.drone_map_frame_id
-        vec.header.stamp = self.get_clock().now().to_msg()
-        vec.vector.x = self.sam_vel[0]
-        vec.vector.y = self.sam_vel[1]
-        result = tf2_geometry_msgs.do_transform_vector3(vec, t)
-        sam_vel_odom = [result.vector.x, result.vector.y]
+
+        sam_vel_odom = self.transform_vector(self.sam_vel)
 
         # Compute weights by mapping SAM's velocity to closest base vector and retrieving most appropriate kernel
         (q, Q) = self.compute_kernel_coeff(sam_vel=np.linalg.norm(sam_vel_odom),
@@ -244,32 +236,33 @@ def find_cells2update(self):
         the "projected radius" (which depends on height of flight and camer FOV).
 
         """
-        if self.drone_pos_odom_gt is not None:
-            x_cells, y_cells = [], []
-            x, y = self.drone_pos_odom_gt.pose.position.x, self.drone_pos_odom_gt.pose.position.y
-            detection_radius = tan(self.camera_fov/2)*self.drone_pos_odom_gt.pose.position.z
-            xc_cell, yc_cell = self.find_cell(x_coord = x, y_coord = y) # drone's current cell
-
-            # create a mask around current cell to avoid iterate over full workspace
-            encirclements = int(detection_radius/self.resol)+1
-            base_mask = np.ix_(np.arange(yc_cell-encirclements,yc_cell+encirclements+1,1), np.arange(xc_cell-encirclements,xc_cell+encirclements+1,1))
-
-            # remove cells that lie outside the workspace
-            base_mask_y_filtered = np.extract((0 <= base_mask[0]) & (base_mask[0] <= self.Ncells_y -1), base_mask[0])
-            base_mask_x_filtered = np.extract((0 <= base_mask[1]) & (base_mask[1] <= self.Ncells_x -1), base_mask[1])
-            base_mask_filtered = (base_mask_y_filtered.reshape(base_mask_y_filtered.shape[0],1), base_mask_x_filtered )
-
-            # apply distance and time criteria
-            sub_X, sub_Y, sub_Time = self.X[base_mask_filtered], self.Y[base_mask_filtered], self.map_Time[base_mask_filtered]
-            condition_mask = (
-                ((np.power(sub_X - x, 2) + np.power(sub_Y - y, 2)) < detection_radius ** 2)
-                & ((self.get_clock().now().nanoseconds) - sub_Time > self.time_margin)
-            )
-            condition_rows, condition_cols = np.where(condition_mask)
-            rows = condition_rows + yc_cell-encirclements
-            columns = condition_cols + xc_cell-encirclements
-
-            return rows, columns
+
+        x_cells, y_cells = [], []
+        drone_pos_odom, height = self.transform_point(self.drone_position)
+        x, y = drone_pos_odom[0], drone_pos_odom[1]
+        detection_radius = tan(self.camera_fov/2)*height
+        xc_cell, yc_cell = self.find_cell(x_coord = x, y_coord = y) # drone's current cell
+
+        # create a mask around current cell to avoid iterate over full workspace
+        encirclements = int(detection_radius/self.resol)+1
+        base_mask = np.ix_(np.arange(yc_cell-encirclements,yc_cell+encirclements+1,1), np.arange(xc_cell-encirclements,xc_cell+encirclements+1,1))
+
+        # remove cells that lie outside the workspace
+        base_mask_y_filtered = np.extract((0 <= base_mask[0]) & (base_mask[0] <= self.Ncells_y -1), base_mask[0])
+        base_mask_x_filtered = np.extract((0 <= base_mask[1]) & (base_mask[1] <= self.Ncells_x -1), base_mask[1])
+        base_mask_filtered = (base_mask_y_filtered.reshape(base_mask_y_filtered.shape[0],1), base_mask_x_filtered )
+
+        # apply distance and time criteria
+        sub_X, sub_Y, sub_Time = self.X[base_mask_filtered], self.Y[base_mask_filtered], self.map_Time[base_mask_filtered]
+        condition_mask = (
+            ((np.power(sub_X - x, 2) + np.power(sub_Y - y, 2)) < detection_radius ** 2)
+            & ((self.get_clock().now().nanoseconds) - sub_Time > self.time_margin)
+        )
+        condition_rows, condition_cols = np.where(condition_mask)
+        rows = condition_rows + yc_cell-encirclements
+        columns = condition_cols + xc_cell-encirclements
+
+        return rows, columns
 
     def find_cell(self, x_coord, y_coord):
         """ Maps (x,y) coordinates to cell coordinates """
@@ -302,15 +295,18 @@ def transform_point(self, point:PointStamped) -> np.array:
             target_frame = self.drone_odom_frame_id,
             source_frame = point.header.frame_id,
             time=rclpy.time.Time() )
-        # goal = PointStamped()
-        # goal.header.stamp = t.header.stamp
-        # goal.header.frame_id = self.drone_map_frame_id
-        # goal.point.x = point[0]
-        # goal.point.y = point[1]
-        # goal.point.z = self.flight_height
         new_point = tf2_geometry_msgs.do_transform_point(point, t)
-        self.get_logger().info(f"GPS ping in quadrotor's odom = {(round(float(new_point.point.x),2), round(float(new_point.point.y),2))}")
-        return np.array([new_point.point.x, new_point.point.y])
+        return np.array([new_point.point.x, new_point.point.y]), new_point.point.z
+
+    def transform_vector(self, vector:Vector3Stamped) -> np.array:
+        """Convert points in map frame to odom frame"""
+        t = self.tf_buffer.lookup_transform(
+            target_frame = self.drone_odom_frame_id,
+            source_frame = vector.header.frame_id,
+            time=rclpy.time.Time() )
+        new_vector = tf2_geometry_msgs.do_transform_vector3(vector, t)
+        sam_vel_odom = [new_vector.vector.x, new_vector.vector.y]
+        return np.array(sam_vel_odom)
 
     def initiatePrior(self, GPS_ping_odom:np.array):
         """ Initiate Gaussian prior in the odom_gt frame"""
@@ -365,25 +361,16 @@ def map_probabilities(self, data):
         slope = (new_max - new_min)/(max - min)
         return np.int8(slope*(data - max) + new_max)
 
-
-    def drone_odom_callback(self, msg):
-        """ Retrieve drone position (in map_gt) and tranform to init_drone_frame and to Quadrotor/odom_gt (for occupancy grid)"""
-
-        # transform to /Quadrotor/odom_gt
-        t = self.tf_buffer.lookup_transform(
-            target_frame=self.drone_odom_frame_id,
-            source_frame=self.drone_map_frame_id,
-            time=rclpy.time.Time(),
-            timeout = Duration(seconds = 1))
-        pose_map = PoseStamped()
-        pose_map.header.stamp = t.header.stamp
-        pose_map.header.frame_id = msg.header.frame_id # should be map_gt
-        pose_map.pose = msg.pose.pose
-        self.drone_pos_odom_gt = tf2_geometry_msgs.do_transform_pose_stamped(pose_map, t)
-
 
-    def sam_odom_callback(self, msg):
+    def sam_odom_callback(self, msg:Odometry):
         """ Retrieve SAM position (map_gt)"""
-        self.sam_vel = np.array([msg.twist.twist.linear.x, msg.twist.twist.linear.y])
-        #self.get_logger().info(f'SAM velocity: {self.sam_vel}')
+        self.sam_vel.vector.x = msg.twist.twist.linear.x
+        self.sam_vel.vector.y = msg.twist.twist.linear.y
+        self.sam_vel.vector.z = msg.twist.twist.linear.z
+        self.sam_vel.header = msg.header #np.array([msg.twist.twist.linear.x, msg.twist.twist.linear.y])
+
+    def drone_odom_callback(self, msg: Odometry):
+        """ Retrieve drone position (map_gt)"""
+        self.drone_position.point = msg.pose.pose.position
+        self.drone_position.header = msg.header
 
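
The rewritten find_cells2update() keeps the same windowing idea as before: build a small index window around the drone's cell with np.ix_, clip it to the grid bounds, and evaluate the detection-radius test only on that sub-block instead of the full map. The following self-contained sketch illustrates that pattern with made-up shapes and values (it is not repo code, and it maps local indices back through the clipped window arrays rather than a fixed offset):

import numpy as np

resol = 1.0                                  # cell size [m]
Ncells_y, Ncells_x = 50, 60                  # grid dimensions
X, Y = np.meshgrid(np.arange(Ncells_x) * resol,
                   np.arange(Ncells_y) * resol)  # cell-centre coordinates, shape (Ncells_y, Ncells_x)

x, y = 12.3, 20.7                            # drone position in the grid frame
detection_radius = 3.0                       # tan(camera_fov/2) * flight height in the real code

xc_cell, yc_cell = int(x / resol), int(y / resol)
encirclements = int(detection_radius / resol) + 1

# local window of row/column indices around the drone's current cell
rows_win = np.arange(yc_cell - encirclements, yc_cell + encirclements + 1)
cols_win = np.arange(xc_cell - encirclements, xc_cell + encirclements + 1)

# drop indices that fall outside the workspace
rows_win = rows_win[(rows_win >= 0) & (rows_win <= Ncells_y - 1)]
cols_win = cols_win[(cols_win >= 0) & (cols_win <= Ncells_x - 1)]

# open-mesh indexing selects only the sub-block around the drone
window = np.ix_(rows_win, cols_win)
sub_X, sub_Y = X[window], Y[window]

# keep cells whose centre lies inside the detection circle
inside = (sub_X - x) ** 2 + (sub_Y - y) ** 2 < detection_radius ** 2
local_r, local_c = np.where(inside)

# map window-local indices back to global grid indices
rows, columns = rows_win[local_r], cols_win[local_c]
print(list(zip(rows.tolist(), columns.tolist())))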
