### Introduction to the Informed RRT* Algorithm
Informed RRT* is an improved version of the asymptotically optimal Rapidly-exploring Random Tree (RRT*) path-planning algorithm, designed to raise search efficiency and cut down unnecessary exploration. Conventional RRT and RRT* can be inefficient in high-dimensional configuration spaces because they sample blindly over the entire feasible region.
#### What It Improves
Informed RRT* introduces a heuristic bound that restricts sampling to the subset of the state space that could still contain a better solution, which sharply reduces the search region. In 2D this subset is an ellipse whose foci are the start and goal and whose major-axis length equals the cost of the best path found so far, so the region shrinks every time a better path is discovered. Concretely, once a first solution exists, each iteration only draws samples that could potentially lower the cost of the current best path. This both speeds up convergence and improves the quality of the final solution[^1].
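A minimal sketch of that sampling step is shown below; it assumes 2D points given as `(x, y)` tuples, and the helper name `sample_in_ellipse` is purely illustrative:
```python
import numpy as np

def sample_in_ellipse(start, goal, c_best):
    """Uniformly sample the informed set: the ellipse with foci start/goal
    and major-axis length c_best (cost of the best path found so far)."""
    c_min = np.hypot(goal[0] - start[0], goal[1] - start[1])  # straight-line distance
    a = c_best / 2.0                                          # semi-major axis
    b = np.sqrt(max(c_best ** 2 - c_min ** 2, 0.0)) / 2.0     # semi-minor axis
    # Uniform point in the unit disc, stretched onto the ellipse axes ...
    t = np.random.uniform(0.0, 2.0 * np.pi)
    r = np.sqrt(np.random.uniform())
    x, y = a * r * np.cos(t), b * r * np.sin(t)
    # ... then rotated to the start->goal direction and shifted to the ellipse centre.
    ang = np.arctan2(goal[1] - start[1], goal[0] - start[0])
    cx, cy = (start[0] + goal[0]) / 2.0, (start[1] + goal[1]) / 2.0
    return (cx + x * np.cos(ang) - y * np.sin(ang),
            cy + x * np.sin(ang) + y * np.cos(ang))
```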
#### Application Scenarios
The algorithm is particularly well suited to complex environments and real-time path replanning in robot navigation. For example:
- **Mobile robots**: selecting safe, efficient routes for autonomous vehicles in unknown or partially known environments;
- **UAV flight control**: helping drones build 3D obstacle-avoiding trajectories;
- **Manipulator motion planning**: designing fine-grained motion sequences for multi-degree-of-freedom articulated arms.
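The simplified 2D Python sketch below puts these ideas together; it checks only point collisions against circular obstacles and makes several other simplifying assumptions noted in the comments.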
```python
import numpy as np
from scipy.spatial import cKDTree

def informed_rrt_star(start, goal, obstacles,
                      max_iter=10000, step_size=0.5, goal_tol=0.1):
    """
    A simplified 2D Informed RRT*.

    Parameters:
        start     : start position as an (x, y) tuple
        goal      : goal position as an (x, y) tuple
        obstacles : list of circular obstacles [(x1, y1, radius), ...]
    Returns:
        path : a valid path from start to goal as [[x0, y0], ..., [xn, yn]],
               or None if no path was found within max_iter iterations.
    """
    class Node(object):
        def __init__(self, pos=(0.0, 0.0), parent=None, cost=0.0):
            self.pos = tuple(pos)      # position of this vertex
            self.parent = parent       # reference to the parent Node (None for the root)
            self.cost = float(cost)    # accumulated path cost from the root

    tree = [Node(start)]               # the tree initially contains only the start vertex
    best_cost = np.inf                 # length of the best complete path found so far
    best_goal_node = None              # tree vertex that currently reaches the goal

    for _ in range(int(max_iter)):
        # Rebuild the KD-tree for nearest-neighbour queries (simple, O(n log n) per iteration).
        kdtree = cKDTree([n.pos for n in tree])

        # Goal bias: with probability 0.1 pick the goal itself, otherwise sample
        # from the informed set (an ellipse once a first solution exists).
        if np.random.rand() > 0.9:
            q_rand = tuple(goal)
        else:
            q_rand = sample_informed(start, goal, best_cost)

        # Steer from the nearest existing vertex towards the sample by at most step_size.
        _, idx = kdtree.query(q_rand, k=1)
        nearest_node = tree[idx]
        new_pos = move_towards(nearest_node.pos, q_rand, step_size)

        # Only point collisions are checked here; a full planner would also check the edge.
        if not collision_free(new_pos, obstacles):
            continue

        # Choose the cheapest parent among vertices inside the rewiring radius.
        near_nodes_idx = kdtree.query_ball_point(new_pos, r=2.0 * step_size)
        if not near_nodes_idx:
            near_nodes_idx = [idx]
        candidate_costs = [tree[i].cost + euclidean_distance(tree[i].pos, new_pos)
                           for i in near_nodes_idx]
        cheapest = near_nodes_idx[int(np.argmin(candidate_costs))]
        new_node = Node(new_pos, parent=tree[cheapest], cost=min(candidate_costs))
        tree.append(new_node)

        # Rewire nearby vertices through the new vertex when that lowers their cost.
        update_neighbors(tree, near_nodes_idx, new_node)

        # Record improved solutions but keep iterating: later samples are drawn from
        # the shrinking ellipse, which is what makes the planner "informed".
        if within_threshold(new_pos, goal, goal_tol) and new_node.cost < best_cost:
            best_cost = new_node.cost
            best_goal_node = new_node

    return reconstruct_path(best_goal_node) if best_goal_node is not None else None


def sample_informed(start, goal, best_cost, sample_range=10.0):
    """Sample uniformly over the workspace until a solution exists, then uniformly
    inside the ellipse with foci start/goal and major-axis length best_cost."""
    if not np.isfinite(best_cost):
        # No solution yet: fall back to uniform sampling over an assumed square workspace
        # of half-width sample_range; adjust this bound to your own environment.
        return tuple(np.random.uniform(-sample_range, sample_range, size=2))
    c_min = euclidean_distance(start, goal)                       # shortest possible length
    a = best_cost / 2.0                                           # semi-major axis
    b = np.sqrt(max(best_cost ** 2 - c_min ** 2, 1e-12)) / 2.0    # semi-minor axis
    theta = np.random.uniform(0.0, 2.0 * np.pi)
    r = np.sqrt(np.random.uniform())                              # uniform point in the unit disc
    dx, dy = a * r * np.cos(theta), b * r * np.sin(theta)
    offset_angle = np.arctan2(goal[1] - start[1], goal[0] - start[0])
    rot_matrix = np.array([[np.cos(offset_angle), -np.sin(offset_angle)],
                           [np.sin(offset_angle),  np.cos(offset_angle)]])
    delta_pos = rot_matrix.dot([dx, dy])
    center = ((start[0] + goal[0]) / 2.0, (start[1] + goal[1]) / 2.0)
    return (center[0] + delta_pos[0], center[1] + delta_pos[1])


def move_towards(p_from, p_to, max_dist=0.5):
    """Step from p_from towards p_to by at most max_dist."""
    d = euclidean_distance(p_from, p_to)
    if d <= max_dist:
        return tuple(p_to)
    ratio = max_dist / d
    return (p_from[0] + ratio * (p_to[0] - p_from[0]),
            p_from[1] + ratio * (p_to[1] - p_from[1]))


def euclidean_distance(a, b):
    """Straight-line distance between two 2D points."""
    return float(np.hypot(b[0] - a[0], b[1] - a[1]))


def update_neighbors(tree, indices, node):
    """Rewire: make `node` the parent of any nearby vertex it can reach more cheaply."""
    for idx in indices:
        neighbor = tree[idx]
        tentative = node.cost + euclidean_distance(node.pos, neighbor.pos)
        if tentative < neighbor.cost:
            neighbor.parent = node
            neighbor.cost = tentative


def within_threshold(point, target, tolerance=0.1):
    """True if every coordinate of `point` is within `tolerance` of `target`."""
    return all(abs(pi - ti) <= tolerance for pi, ti in zip(point, target))


def collision_free(position, obs_list):
    """True if `position` lies outside every circular obstacle (ox, oy, radius)."""
    px, py = position
    for ox, oy, r in obs_list:
        if (px - ox) ** 2 + (py - oy) ** 2 < r * r:
            return False
    return True


def reconstruct_path(closest_node):
    """Follow parent references back to the root; return the path from start to node."""
    path = []
    current = closest_node
    while current is not None:
        path.insert(0, list(current.pos))
        current = current.parent
    return path
```
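As a quick sanity check, the snippet below runs the sketch on a small hypothetical 2D scene; the start, goal, and obstacle values are made up for illustration, and since sampling is random the returned path and its length vary between runs.
```python
if __name__ == "__main__":
    # Two circular obstacles (x, y, radius); values chosen purely for illustration.
    obstacles = [(2.0, 2.0, 1.0), (4.0, 3.5, 0.8)]
    path = informed_rrt_star(start=(0.0, 0.0), goal=(5.0, 5.0),
                             obstacles=obstacles, max_iter=2000)
    if path is None:
        print("No path found within the iteration budget.")
    else:
        length = sum(euclidean_distance(a, b) for a, b in zip(path, path[1:]))
        print(f"Found a path with {len(path)} waypoints, length ~ {length:.2f}")
```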