The following presents the implementation steps and a code example for a gradient-descent-based path planning algorithm.

1. Algorithm Overview

Gradient-descent-based path planning iteratively optimizes the waypoints of a path so that a total cost (path length, obstacle distance, smoothness) is minimized. The steps are as follows:

- Initialize the path: a straight line from the start to the goal.
- Define the cost function: a length term, an obstacle term, and a smoothness term (written out below).
- Compute the gradients: the gradient of each cost term with respect to the waypoints.
- Iteratively update the path: move the waypoints along the negative gradient until convergence.
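Concretely, writing the waypoints as $p_1,\dots,p_N$ (with $p_1$ the start and $p_N$ the goal) and letting $o$ range over the obstacle centers, the total cost used by the implementation below can be written as

$$
J(p_1,\dots,p_N) \;=\; w_{\mathrm{len}} \sum_{i=1}^{N-1} \lVert p_{i+1}-p_i\rVert
\;+\; w_{\mathrm{obs}} \sum_{i=1}^{N} e^{-\min_{o}\lVert p_i-o\rVert}
\;+\; w_{\mathrm{smooth}} \sum_{i=2}^{N-1} \lVert p_{i-1}-2p_i+p_{i+1}\rVert^{2}
$$

where $(w_{\mathrm{len}}, w_{\mathrm{obs}}, w_{\mathrm{smooth}})$ corresponds to the `weights` tuple passed to the planner.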
2. Code Implementation (Python Simulation)

```python
import numpy as np
import matplotlib.pyplot as plt


class GradientDescentPathPlanner:
    def __init__(self, start, goal, num_points, obstacles, weights=(1.0, 1.0, 1.0)):
        self.start = np.array(start)
        self.goal = np.array(goal)
        self.num_points = num_points
        self.obstacles = [np.array(obs) for obs in obstacles]
        self.weights = weights  # (w_length, w_obs, w_smooth)
        self.path = self.initialize_path()

    def initialize_path(self):
        # Evenly spaced waypoints on the straight line from start to goal (endpoints included)
        return np.linspace(self.start, self.goal, self.num_points)

    def cost_function(self, path):
        # Total cost: weighted sum of the length, obstacle, and smoothness terms
        cost_length = self.calc_length_cost(path)
        cost_obstacle = self.calc_obstacle_cost(path)
        cost_smooth = self.calc_smooth_cost(path)
        total_cost = (self.weights[0] * cost_length +
                      self.weights[1] * cost_obstacle +
                      self.weights[2] * cost_smooth)
        return total_cost, (cost_length, cost_obstacle, cost_smooth)

    def calc_length_cost(self, path):
        # Total path length
        return sum(np.linalg.norm(path[i + 1] - path[i]) for i in range(len(path) - 1))

    def calc_obstacle_cost(self, path):
        # Obstacle penalty (exponential): the closer to an obstacle, the larger the penalty
        cost = 0.0
        for point in path:
            min_dist = min(np.linalg.norm(point - obs) for obs in self.obstacles)
            cost += np.exp(-min_dist)
        return cost

    def calc_smooth_cost(self, path):
        # Smoothness term: sum of squared second-order differences
        cost = 0.0
        for i in range(1, len(path) - 1):
            diff = path[i - 1] - 2 * path[i] + path[i + 1]
            cost += np.linalg.norm(diff) ** 2
        return cost

    def compute_gradients(self, path):
        # Weighted sum of the gradients of the three cost terms
        grad_length = self.compute_length_gradient(path)
        grad_obstacle = self.compute_obstacle_gradient(path)
        grad_smooth = self.compute_smooth_gradient(path)
        total_grad = (
            self.weights[0] * grad_length +
            self.weights[1] * grad_obstacle +
            self.weights[2] * grad_smooth
        )
        return total_grad

    def compute_length_gradient(self, path):
        # Gradient of the path-length term
        grad = np.zeros_like(path)
        for i in range(len(path)):
            if i == 0:
                # The start point only affects the first segment
                grad[i] = (path[i] - path[i + 1]) / (np.linalg.norm(path[i] - path[i + 1]) + 1e-8)
            elif i == len(path) - 1:
                # The goal point only affects the last segment
                grad[i] = (path[i] - path[i - 1]) / (np.linalg.norm(path[i] - path[i - 1]) + 1e-8)
            else:
                # Interior points affect the segments before and after them
                grad[i] = (
                    (path[i] - path[i + 1]) / (np.linalg.norm(path[i] - path[i + 1]) + 1e-8) +
                    (path[i] - path[i - 1]) / (np.linalg.norm(path[i] - path[i - 1]) + 1e-8)
                )
        return grad

    def compute_obstacle_gradient(self, path):
        # Gradient of the obstacle term: push each waypoint away from its nearest obstacle
        grad = np.zeros_like(path)
        for i in range(len(path)):
            point = path[i]
            min_dist = np.inf
            closest_obs = None
            for obs in self.obstacles:
                dist = np.linalg.norm(point - obs)
                if dist < min_dist:
                    min_dist = dist
                    closest_obs = obs
            if closest_obs is not None:
                direction = (point - closest_obs) / (min_dist + 1e-8)
                grad[i] = -np.exp(-min_dist) * direction
        return grad

    def compute_smooth_gradient(self, path):
        # Gradient of the smoothness term (second-order differences)
        grad = np.zeros_like(path)
        n = len(path)
        for j in range(1, n - 1):
            term = path[j - 1] - 2 * path[j] + path[j + 1]
            grad[j - 1] += 2 * term
            grad[j] += -4 * term
            grad[j + 1] += 2 * term
        return grad

    def optimize(self, max_iter=100, lr=0.01, tol=1e-5):
        costs = []
        for it in range(max_iter):
            grad = self.compute_gradients(self.path)
            # Update interior points only (start and goal stay fixed)
            self.path[1:-1] -= lr * grad[1:-1]
            # Record the current cost
            total_cost, _ = self.cost_function(self.path)
            costs.append(total_cost)
            if np.linalg.norm(grad[1:-1]) < tol:
                print(f"Converged at iteration {it}")
                break
        return self.path, costs


# Simulation example
if __name__ == "__main__":
    start = (0, 0)
    goal = (10, 10)
    obstacles = [(5, 5), (3, 7), (7, 3)]
    num_points = 20
    planner = GradientDescentPathPlanner(start, goal, num_points, obstacles, weights=(1, 100, 10))

    # Initial path
    initial_path = planner.path.copy()

    # Optimize the path
    optimized_path, costs = planner.optimize(max_iter=500, lr=0.05)

    # Visualization
    plt.figure()
    plt.scatter(*zip(*obstacles), c='red', label='Obstacles')
    plt.plot(*initial_path.T, 'g--', label='Initial Path')
    plt.plot(*optimized_path.T, 'b-', label='Optimized Path')
    plt.scatter(*start, c='green', marker='s', label='Start')
    plt.scatter(*goal, c='red', marker='s', label='Goal')
    plt.legend()
    plt.xlabel('X')
    plt.ylabel('Y')
    plt.title('Gradient Descent Path Planning')
    plt.show()

    # Plot the cost curve
    plt.figure()
    plt.plot(costs)
    plt.xlabel('Iteration')
    plt.ylabel('Total Cost')
    plt.title('Cost Convergence')
    plt.show()
```
3. Key Steps Explained
- Path initialization: straight-line interpolation between the start and the goal.
- Cost function: composed of path length, obstacle distance, and smoothness, with adjustable weights.
- Gradient computation (the expressions are written out after this list):
  - Length term: the negative gradient pulls each waypoint toward its neighbors, shortening the path.
  - Obstacle term: each waypoint is adjusted in the direction away from its nearest obstacle.
  - Smoothness term: a second-order-difference penalty that reduces path curvature.
- Iterative optimization: update the waypoints along the negative gradient direction while keeping the start and goal fixed.
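For an interior waypoint $p_i$, with $o_i^{*}$ its nearest obstacle and $d_i=\lVert p_i-o_i^{*}\rVert$, the per-term gradients implemented by compute_length_gradient, compute_obstacle_gradient, and compute_smooth_gradient are

$$
\frac{\partial J_{\mathrm{len}}}{\partial p_i}=\frac{p_i-p_{i+1}}{\lVert p_i-p_{i+1}\rVert}+\frac{p_i-p_{i-1}}{\lVert p_i-p_{i-1}\rVert},\qquad
\frac{\partial J_{\mathrm{obs}}}{\partial p_i}=-\,e^{-d_i}\,\frac{p_i-o_i^{*}}{d_i}
$$

$$
\frac{\partial J_{\mathrm{smooth}}}{\partial p_i}=2\,(p_{i-2}-2p_{i-1}+p_i)-4\,(p_{i-1}-2p_i+p_{i+1})+2\,(p_i-2p_{i+1}+p_{i+2})
$$

Terms that would reference waypoints outside the path are dropped at the boundaries, and the code adds a small epsilon (1e-8) to the denominators to avoid division by zero.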
4. ROS C++ Implementation Notes

- Subscribe to the costmap to obtain obstacle information.
- Path representation: use nav_msgs::Path messages.
- Real-time updates: run the iterative optimization from a timer or sensor callback.
- Visualization: display the optimization progress live in RViz.

By tuning the weight parameters and the learning rate, the trade-off between path length, safety, and smoothness can be balanced to suit different scenarios.
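As a rough sketch of how these pieces fit together: the heading above refers to C++, but to stay consistent with the rest of this article the outline below uses Python (rospy); the same structure maps directly onto roscpp with ros::Subscriber, ros::Publisher, and ros::Timer. The node name, topic names, and the OnlinePathOptimizer class are illustrative assumptions, and the coupling to GradientDescentPathPlanner (extracting obstacle cells from the costmap) is left as a stub.

```python
import rospy
import numpy as np
from nav_msgs.msg import OccupancyGrid, Path
from geometry_msgs.msg import PoseStamped


class OnlinePathOptimizer:
    """Hypothetical ROS node: re-runs gradient-descent path optimization on a timer."""

    def __init__(self):
        rospy.init_node("gd_path_planner")
        self.costmap = None  # latest occupancy grid, used to derive obstacle positions
        # Topic name is an assumption; adjust to match your costmap setup
        rospy.Subscriber("/move_base/global_costmap/costmap", OccupancyGrid, self.costmap_cb)
        self.path_pub = rospy.Publisher("optimized_path", Path, queue_size=1)
        rospy.Timer(rospy.Duration(0.5), self.optimize_cb)

    def costmap_cb(self, msg):
        self.costmap = msg

    def optimize_cb(self, event):
        if self.costmap is None:
            return
        # In a full implementation, obstacle cells would be extracted from the costmap
        # and fed into GradientDescentPathPlanner (from the simulation code above).
        waypoints = np.linspace([0.0, 0.0], [10.0, 10.0], 20)  # placeholder path
        msg = Path()
        msg.header.stamp = rospy.Time.now()
        msg.header.frame_id = "map"
        for x, y in waypoints:
            pose = PoseStamped()
            pose.header = msg.header
            pose.pose.position.x = float(x)
            pose.pose.position.y = float(y)
            pose.pose.orientation.w = 1.0
            msg.poses.append(pose)
        self.path_pub.publish(msg)  # RViz can display this topic with a Path display


if __name__ == "__main__":
    OnlinePathOptimizer()
    rospy.spin()
```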