2020 Andrew Ng Machine Learning programming assignment ex6 (Python implementation)

# -*- coding: utf-8 -*-
"""
Created on Thu Jul  2 20:13:57 2020

@author: cheetah023
"""
import numpy as np
import scipy.io as sci
import matplotlib.pyplot as plt
from sklearn import svm

# Function definitions
def plotData(X, y):
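    # Scatter plot of the examples: positives (y == 1) as green circles, negatives (y == 0) as red crosses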
    pos = np.where(y == 1)
    neg = np.where(y == 0)
    plt.scatter(X[pos[0],0],X[pos[0],1],marker='o',c='g')
    plt.scatter(X[neg[0],0],X[neg[0],1],marker='x',c='r')
def visualizeBoundary(X,model):
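    # Plot the decision boundary by predicting the trained model on a dense grid over X and contouring the predictions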
    x1_max = np.max(X[:,0])
    x1_min = np.min(X[:,0])
    x2_max = np.max(X[:,1])
    x2_min = np.min(X[:,1])
    x1_t = np.linspace(x1_min, x1_max, 1000)
    x2_t = np.linspace(x2_min, x2_max, 1000)
    # meshgrid takes two 1-D arrays and returns two 2-D matrices that enumerate every (x, y) pair on the grid
    x1,x2 = np.meshgrid(x1_t, x2_t)
    p = model.predict(np.c_[x1.flatten(), x2.flatten()])
    p = p.reshape(x1.shape)
    plt.contour(x1, x2, p)
def gaussianKernel(x1, x2, sigma):
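    # Gaussian (RBF) similarity between two vectors: exp(-||x1 - x2||^2 / (2 * sigma^2))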
    x1 = np.reshape(x1,[-1,1])
    x2 = np.reshape(x2,[-1,1])
    sim = np.exp(-np.sum((x1-x2) ** 2) / (2 * sigma * sigma))
    return sim
def dataset3Params(X, y, Xval, yval):
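    # Grid search over candidate values of C and sigma, keeping the pair with the best accuracy on the validation set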
    list_vec = [0.01,0.03,0.1,0.3,1,3,10,30]
    C = 0.01
    sigma = 0.01
    score = 0
    for C_t in list_vec:
        for sigma_t in list_vec:
            gamma_t = 1 / (2 * sigma_t * sigma_t)
            model = svm.SVC(C = C_t,kernel='rbf',gamma=gamma_t)
            model.fit(X, y.flatten())
            score_t = model.score(Xval,yval.flatten())
            if score_t > score:
                score = score_t
                sigma = sigma_t
                C = C_t
    return C, sigma
#Part 1: Loading and Visualizing Data
data = sci.loadmat('ex6data1.mat')
#print(data.keys())
X = data['X']
y = data['y']
print('X1:',X.shape)
print('y1:',y.shape)
plt.figure(0)
plotData(X, y)

#Part 2: Training Linear SVM
C_t = 1
# Use sklearn's built-in linear kernel
model = svm.SVC(C = C_t,kernel='linear')
model.fit(X, y.flatten())
visualizeBoundary(X,model)
plt.title('SVM Decision Boundary data1  C = 1')

#Part 3: Implementing Gaussian Kernel
x1 = [1, 2, 1]
x2 = [0, 4, -1]
sigma = 2
sim = gaussianKernel(x1, x2, sigma)
print('Gaussian Kernel:',sim)
print('(for sigma = 2, this value should be about 0.324652)')

#Part 4: Visualizing Dataset 2
data = sci.loadmat('ex6data2.mat')
#print(data.keys())
X = data['X']
y = data['y']
print('X2:',X.shape)
print('y2:',y.shape)
plt.figure(1)
plotData(X, y)

#Part 5: Training SVM with RBF Kernel (Dataset 2)
C_t = 1
sigma = 0.1
gamma_t = 1 / (2 * sigma * sigma)
# Use sklearn's built-in Gaussian (RBF) kernel
model = svm.SVC(C = C_t,kernel='rbf',gamma=gamma_t)
model.fit(X, y.flatten())
visualizeBoundary(X,model)
plt.title('SVM Decision Boundary data2  C = 1 sigma = 0.1')

#Part 6: Visualizing Dataset 3
data = sci.loadmat('ex6data3.mat')
#print(data.keys())
X = data['X']
y = data['y']
Xval = data['Xval']
yval = data['yval']
print('X3:',X.shape)
print('y3:',y.shape)
plt.figure(2)
plotData(X, y)

#Part 7: Training SVM with RBF Kernel (Dataset 3)
[C_t, sigma] = dataset3Params(X, y, Xval, yval)
gamma_t = 1 / (2 * sigma * sigma)
model = svm.SVC(C = C_t,kernel='rbf',gamma=gamma_t)
model.fit(X, y.flatten())
visualizeBoundary(X,model)
plt.title('SVM Decision Boundary data3  C = {} sigma = {}'.format(C_t,sigma))

Output:

X1: (51, 2)
y1: (51, 1)
Gaussian Kernel: 0.32465246735834974
(for sigma = 2, this value should be about 0.324652)
X2: (863, 2)
y2: (863, 1)
X3: (211, 2)
y3: (211, 1)
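
As a quick sanity check of the kernel value above: with x1 = [1, 2, 1] and x2 = [0, 4, -1], the squared distance is 1 + 4 + 4 = 9, so exp(-9 / (2 * 2^2)) = exp(-1.125) ≈ 0.3247, which matches the printed result.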

 

References:

https://blog.csdn.net/weixin_44027820/article/details/104592429

Summary:

1. The main takeaway was learning how to use the kernels built into sklearn's SVM; a short sketch of the sigma-to-gamma mapping follows below.
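
A minimal sketch of that mapping, assuming a hypothetical gaussian_gram helper and toy data that are not part of the original script: the exercise's Gaussian kernel with parameter sigma matches sklearn's built-in RBF kernel with gamma = 1 / (2 * sigma^2), and the same kernel can also be passed to svm.SVC as a callable.

import numpy as np
from sklearn import svm

def gaussian_gram(XA, XB, sigma=0.1):
    # Pairwise squared Euclidean distances between rows of XA and XB
    sq_dists = (np.sum(XA ** 2, axis=1)[:, None]
                + np.sum(XB ** 2, axis=1)[None, :]
                - 2 * XA @ XB.T)
    # Gaussian kernel matrix; gamma = 1 / (2 * sigma^2) reproduces kernel='rbf'
    return np.exp(-sq_dists / (2 * sigma * sigma))

# Toy data for illustration only (an assumption, not one of the ex6 datasets)
rng = np.random.default_rng(0)
X_toy = rng.normal(size=(40, 2))
y_toy = (X_toy[:, 0] * X_toy[:, 1] > 0).astype(int)

sigma = 0.1
model_rbf = svm.SVC(C=1, kernel='rbf', gamma=1 / (2 * sigma * sigma)).fit(X_toy, y_toy)
model_custom = svm.SVC(C=1, kernel=lambda A, B: gaussian_gram(A, B, sigma)).fit(X_toy, y_toy)
# The two models should agree on (nearly) every point
print(np.mean(model_rbf.predict(X_toy) == model_custom.predict(X_toy)))

This is only meant to illustrate the sigma-to-gamma conversion used in Parts 5 to 7; for the exercise data itself the built-in 'rbf' kernel used in the script above is the simpler choice.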

 
