Python Implementation of KD-Tree Nearest Neighbor Search

Below is a KDTree class that implements only nearest-neighbor search; a k-nearest-neighbor version may be added later if time permits:

from collections import namedtuple
from operator import itemgetter
from pprint import pformat
import numpy as np


class Node(namedtuple('Node', 'location left_child right_child')):
    def __repr__(self):
        return pformat(tuple(self))


class KDTree:
    def __init__(self, points):
        # Dimensionality of the points (None for an empty point set)
        self.k = len(points[0]) if len(points) > 0 else None
        # Copy the list so the in-place sort inside _make_kdtree
        # does not reorder the caller's original list
        self.tree = self._make_kdtree(list(points))

    def _make_kdtree(self, points, depth=0):
        if not points:
            return None

        # Cycle through the axes as we go deeper into the tree
        k = len(points[0])
        axis = depth % k

        # Sort along the current axis and split at the median
        points.sort(key=itemgetter(axis))
        median = len(points) // 2

        return Node(
            location=points[median],
            left_child=self._make_kdtree(points[:median], depth + 1),
            right_child=self._make_kdtree(points[median + 1:], depth + 1))

    def find_nearest(self,
                     point,
                     root=None,
                     axis=0,
                     dist_func=lambda x, y: np.linalg.norm(x - y)):

        if root is None:
            root = self.tree
            self._best = None

        # If this is not a leaf node, keep walking down the tree first
        if root.left_child or root.right_child:
            new_axis = (axis + 1) % self.k
            if point[axis] < root.location[axis] and root.left_child:
                self.find_nearest(point, root.left_child, new_axis, dist_func)
            elif root.right_child:
                self.find_nearest(point, root.right_child, new_axis, dist_func)

        # Backtracking: try to update the current best with this node
        dist = dist_func(root.location, point)
        if self._best is None or dist < self._best[0]:
            self._best = (dist, root.location)

        # If the hypersphere around the query point crosses the splitting
        # hyperplane, the other subtree may still hold a closer point
        if abs(point[axis] - root.location[axis]) < self._best[0]:
            new_axis = (axis + 1) % self.k
            if root.left_child and point[axis] >= root.location[axis]:
                self.find_nearest(point, root.left_child, new_axis, dist_func)
            elif root.right_child and point[axis] < root.location[axis]:
                self.find_nearest(point, root.right_child, new_axis, dist_func)

        return self._best

Test:

point_list = [(2, 3, 3), (5, 4, 4), (9, 6, 7), (4, 7, 7), (8, 1, 1), (7, 2, 2)]
kdtree = KDTree(point_list)

point = np.array([5, 5, 5])
print(kdtree.find_nearest(point))

Output:

(1.4142135623730951, (5, 4, 4))
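
A quick sanity check, not part of the original post: compare the result against a brute-force scan over the same point list. This minimal sketch reuses point_list and point from the test above:

brute_best = min(
    ((np.linalg.norm(np.array(p) - point), p) for p in point_list),
    key=itemgetter(0))
print(brute_best)  # should also print (1.4142135623730951, (5, 4, 4))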

Performance comparison with Scikit-Learn (my implementation on top, Scikit-Learn's below):
[Screenshots of the two timing runs omitted]

As the screenshots show, the two differ by only about 1 millisecond, so the performance is acceptable.
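
Since the screenshots are not reproduced here, the sketch below shows one way such a timing comparison could be set up with timeit and sklearn.neighbors.KDTree. The data size, query count, and overall setup are assumptions of mine, not necessarily what the original measurement used:

import timeit

from sklearn.neighbors import KDTree as SklearnKDTree

# Random 3-D points as tuples, so they work with the KDTree class above
data = [tuple(p) for p in np.random.rand(10000, 3)]
query = np.random.rand(3)

my_tree = KDTree(data)                   # the implementation above
sk_tree = SklearnKDTree(np.array(data))  # scikit-learn's KD-tree

t_mine = timeit.timeit(lambda: my_tree.find_nearest(query), number=100)
t_sk = timeit.timeit(lambda: sk_tree.query(query.reshape(1, -1), k=1),
                     number=100)
print('mine: %.4fs  sklearn: %.4fs  (100 queries each)' % (t_mine, t_sk))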

(End of post)

Reposted from: https://www.cnblogs.com/gscnblog/p/10566157.html
