Obtaining the first N principal components
# Obtain the first n principal components
import numpy as np
import matplotlib.pyplot as plt
Generate the dataset
# Synthetic 2-D dataset: feature 0 is uniform on [0, 100),
# feature 1 is a noisy linear function of feature 0.
x0 = np.random.uniform(0, 100, size=100)  # real-valued samples
noise = np.random.normal(0, 10, size=100)
X = np.empty((100, 2))
X[:, 0] = x0
X[:, 1] = 0.75 * x0 + 3. + noise
First perform the demean (mean-centering) operation
#demean
def demean(X):
    """Center each column of X by subtracting its column mean (a 1×n row vector)."""
    column_means = np.mean(X, axis=0)
    return X - column_means
# Center the columns of X and plot the centered points.
X = demean(X)
plt.scatter(*X.T)
Gradient ascent method
Objective function (to be maximized)
def f(w, X):
    """Objective to maximize: mean squared length of X's projections onto w."""
    projections = X.dot(w)
    return np.sum(projections ** 2) / len(X)
Gradient (derivative of the objective)
def df(w, X):
    """Gradient of the objective f at w: (2/m) * X^T (X w)."""
    proj = X.dot(w)
    return X.T.dot(proj) * 2 / len(X)
direction
def direction(w):
    """Scale w to unit length by dividing by its Euclidean norm."""
    norm = np.linalg.norm(w)
    return w / norm
First principal component
def first_component(X, initial_w, eta, n_iters=1e4, epsilon=1e-8):
    """Find the first principal component of (demeaned) X by gradient ascent.

    Parameters:
        X: data matrix (m samples × d features), assumed mean-centered.
        initial_w: starting direction (must be non-zero).
        eta: learning rate for the ascent step.
        n_iters: maximum number of iterations.
        epsilon: stop when the objective improves by less than this.

    Returns the unit vector w that (locally) maximizes f(w, X).
    """
    w = direction(initial_w)
    for _ in range(int(n_iters)):
        last_w = w
        # Note: w must be re-normalized to a unit vector after every step.
        w = direction(w + eta * df(w, X))
        if abs(f(w, X) - f(last_w, X)) < epsilon:
            break
    return w
Randomly initialize w, then find the first principal component
# Random starting direction (must not be the zero vector), then run the ascent.
initial_w = np.random.random(X.shape[1])
eta = 0.01
w = first_component(X, initial_w, eta)
print(w)  # the first principal component
[0.77768061 0.62865958]
Find the first principal component of the deflated data
Second principal component (via vector subtraction). Computed here with a for loop; it can also be vectorized.
# Remove from each sample its projection onto w, leaving only the
# component orthogonal to the first principal direction.
X2 = np.empty(X.shape)
for i, sample in enumerate(X):
    X2[i] = sample - sample.dot(w) * w
plt.scatter(X2[:, 0], X2[:, 1])
w and w2 are perpendicular
# The direction found on the deflated data X2 is orthogonal to w.
w2 = first_component(X2, initial_w, eta)
print(w2)
orthogonality = w.dot(w2)
print(orthogonality)  # very close to 0: w2 is perpendicular to w
[-0.62865512 0.77768422]
5.7376907299233615e-06
Vectorized version
# Vectorized form of the projection removal above: outer(Xw, w) holds
# each sample's projection onto w as a row.
X2 = X - np.outer(X.dot(w), w)
Find the first n principal components
def first_n_components(n, X, eta=0.01, n_iters=1e4, epsilon=1e-8):
    """Return the first n principal components of X as a list of unit vectors.

    Parameters:
        n: number of components to extract.
        X: data matrix (m samples × d features); not modified.
        eta: gradient-ascent learning rate.
        n_iters: iteration cap for each component's ascent.
        epsilon: convergence tolerance for each component's ascent.
    """
    X_pca = X.copy()
    X_pca = demean(X_pca)
    res = []
    for i in range(n):
        initial_w = np.random.random(X_pca.shape[1])
        # Bug fix: forward n_iters and epsilon — previously they were
        # accepted but silently ignored, so callers could not tune them.
        w = first_component(X_pca, initial_w, eta, n_iters, epsilon)
        res.append(w)
        # Deflate: remove each sample's projection onto w before
        # searching for the next (orthogonal) component.
        X_pca = X_pca - X_pca.dot(w).reshape(-1, 1) * w
    return res
components = first_n_components(2, X)
print(components)
[array([0.77768056, 0.62865964]), array([ 0.62866465, -0.77767651])]