# Reference article:
# - Algorithm implementation
#!/usr/bin/env python
# coding: utf-8
# ## LLE算法实现
# In[2]:
'''
LLE 2019.11.20
Author: luo
Reference:
Zhihua Zhou. Machine learning[M]. Tsinghua University Press, 2016
实现了LLE
总结:对比研究了LLE的在不同近邻参数下的降维效果,并与sklearn中的LLE进行了比较。
同时将降维后的数据应用于softmax训练
'''
import numpy as np
from scipy.spatial.distance import pdist, squareform
import matplotlib.pyplot as plt
'''
define get_k_maxtria function.
Input:
D:numpy.ndarry, size: [num_sample, num_feature]
k: float, the nearest neighbor parameter k
Return:
k_idx: numpy.ndarry, size: [num_sample, k], the index of the k nearest neighbor
'''
def get_k_maxtria(D, k):
    """Return the indices of the k nearest neighbors of every sample.

    Parameters
    ----------
    D : numpy.ndarray, shape [num_sample, num_feature]
        Data matrix, one sample per row.
    k : int
        Number of nearest neighbors to return (the sample itself is excluded).

    Returns
    -------
    numpy.ndarray of int32, shape [num_sample, k]
        Row i contains the indices of sample i's k nearest neighbors,
        ordered from nearest to farthest.
    """
    # Pairwise Euclidean distances, expanded to a square [m, m] matrix.
    dist = squareform(pdist(D, 'euclidean'))
    # Sort each row by distance. Column 0 is the sample itself (distance 0),
    # so columns 1..k are the k nearest neighbors.
    k_idx = np.argsort(dist, axis=1)[:, 1:k + 1]
    return k_idx.astype(np.int32)
'''
define get_w function.
Input:
D:numpy.ndarry, size: [num_sample, num_feature]
kear_idx: numpy.ndarry, size: [num_sample, k]
k: float, the nearest neighbor parameter k
Return:
w: numpy.ndarry, size: [num_sample, k], Linear reconstruction coefficient matrix
'''
def get_w(D, knear_idx, k):
m = D.shape[0]
w = np.zeros([m, k])
I = np.ones((k, 1))
for i in range(m):
Q_x = D[knear_idx[i]]
xi = D[i]
xi