// A K-D tree is mainly used to implement the k-nearest-neighbor (k-NN)
// algorithm in machine learning. A plain K-D tree only supports
// nearest-neighbor (1-NN) search, but combined with a priority queue it can
// perform k-NN. This is a simple K-D tree implementation; after basic
// testing, no major bugs have been found so far.
#ifndef KDTREE_H
#define KDTREE_H
#include <cassert>
#include <algorithm>
#include <cstddef>
#include <vector>
#include <cmath>
#include <iostream>
using ::std::vector;
using ::std::cout;
using ::std::endl;
namespace sx {
typedef float DataType;
typedef unsigned int UInt;
// A single sample: a K-dimensional feature vector plus a caller-supplied id.
struct Feature {
  vector<DataType> data;  // coordinates of this sample
  int id;                 // caller-supplied identifier; -1 marks "unset"
  // Default constructor. The original left `id` uninitialized
  // (indeterminate value); initialize it to -1 so a default-constructed
  // Feature is well-defined and recognizably invalid.
  Feature() : id(-1) {}
  // Construct from an explicit data vector and id.
  Feature(const vector<DataType> & d, int i)
      : data(d), id(i) {}
};
template <UInt K>
class KDTree {
public:
KDTree();
virtual ~KDTree();
KDTree(const KDTree & rhs);
const KDTree & operator = (const KDTree & rhs);
void Clean();
void Build(const vector<Feature> & matrix_feature);
int FindNearestFeature(const Feature & target) const;
int FindNearestFeature(const Feature & target,
DataType & min_difference) const;
void Show() const;
private:
// One node of the K-D tree: stores a feature and links to the two subtrees
// split on this node's axis.
struct KDNode {
  KDNode * left;    // subtree with smaller values on the split dimension
  KDNode * right;   // subtree with larger-or-equal values
  Feature feature;  // the feature stored at this node
  int depth;        // depth of this node; presumably depth % K picks the
                    // split dimension — confirm against Build()
  // Initializer list reordered to match the member declaration order
  // (left, right, feature, depth): members are initialized in declaration
  // order regardless of list order, so the original order (feature first)
  // triggered -Wreorder and misrepresented the actual initialization order.
  KDNode(const Feature & f, KDNode * lt, KDNode * rt, int d)
      : left(lt), right(rt), feature(f), depth(d) {}
};
KDNode * root_;
// Strict-weak-ordering functor comparing two Features on one fixed
// dimension; used when sorting features to pick the median for a split.
struct Comparator {
  int index_comparator;  // dimension (axis) index the comparison uses
  // `explicit` prevents accidental implicit int -> Comparator conversion.
  explicit Comparator(int ix)
      : index_comparator(ix) {}
  // const-qualified: the C++ Compare requirements expect the comparator to
  // be invocable without modifying it (the original non-const operator()
  // would reject const Comparator instances).
  bool operator () (const Feature & lhs, const Feature & rhs) const {
    return lhs.data[index_comparator] < rhs.data[index_comparator];
  }
};
KDNode * Clone(KDNode * t) const;
void Clean(KDNode * & t);
void SortFeature(vector<Feature> & features, int index);
void Build(const vector<Feature> & matrix_feature,
KDNode * & t, int depth);
DataType Feature2FeatureDifference(const F