PCA-手写体识别实例



# ---------------------------------------------------------------
# Load the handwritten-digit dataset (8x8 images flattened to 64
# pixel features) and score a baseline kNN classifier on the raw,
# un-reduced feature space for later comparison against PCA.
import matplotlib.pyplot as plt
from sklearn import datasets

digit_bunch = datasets.load_digits()
x, y = digit_bunch.data, digit_bunch.target

# ---------------------------------------------------------------
from sklearn.model_selection import train_test_split

x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2)
print(x_train.shape)

from sklearn.neighbors import KNeighborsClassifier

knn_clf = KNeighborsClassifier()
knn_clf.fit(x_train, y_train)
print("normal:", knn_clf.score(x_test, y_test))
# ---------------------------------------------------------------
# Project the features down to 2 principal components (fit on the
# training split only), then retrain kNN on the reduced data to
# see how much accuracy the aggressive reduction costs.
from sklearn.decomposition import PCA

pca = PCA(n_components=2)
pca.fit(x_train)
x_train_2d = pca.transform(x_train)
x_test_2d = pca.transform(x_test)

# ---------------------------------------------------------------
knn_clf = KNeighborsClassifier()
knn_clf.fit(x_train_2d, y_train)
print("reduction:", knn_clf.score(x_test_2d, y_test))
# ---------------------------------------------------------------
# Fit PCA keeping every component and plot the cumulative
# explained-variance ratio against the number of components, to
# choose a target dimensionality by eye.
#
# Fix: the original called np.sum without ever importing numpy,
# which raises NameError at runtime.
import numpy as np

pca = PCA()
pca.fit(x_train)

n_features = x_train.shape[1]
# np.cumsum computes the running total in a single O(n) pass,
# instead of re-summing a growing prefix for every component
# (the original comprehension was O(n^2)).
cumulative_ratio = np.cumsum(pca.explained_variance_ratio_)

plt.figure()
plt.plot(range(n_features), cumulative_ratio)
plt.show()
# ---------------------------------------------------------------
# How many components does PCA need to retain 95% of the variance?
# Passing a float in (0, 1) to PCA asks it to pick that count.
pca = PCA(0.95)
pca.fit(x_train)
print(pca.n_components_)

# Visualise the whole dataset in the plane of the first two
# principal components, one colour per digit class.
pca = PCA(n_components=2)
pca.fit(x)
x_reduction = pca.transform(x)
print(pca.explained_variance_ratio_)

plt.figure()
for digit in range(10):
    mask = y == digit
    plt.scatter(x_reduction[mask, 0], x_reduction[mask, 1], alpha=0.8)
plt.show()

评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值