Completing course assignment ex5 in Python. The Introduction is as follows:
In this exercise, you will implement regularized linear regression and use it to study models with different bias-variance properties.
The code corresponds, Part by Part, to the Matlab version (apologies in advance for the messy overall structure and low readability):
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 12 11:52:16 2019
@author: Lonely_hanhan
"""
'''
%% Machine Learning Online Class
% Exercise 5 | Regularized Linear Regression and Bias-Variance
%
% Instructions
% ------------
%
% This file contains code that helps you get started on the
% exercise. You will need to complete the following functions:
%
% linearRegCostFunction.m
% learningCurve.m
% validationCurve.m
%
% For this exercise, you will not need to change any code in this file,
% or any other files other than those mentioned above.
%
'''
'''
%% =========== Part 1: Loading and Visualizing Data =============
% We start the exercise by first loading and visualizing the dataset.
% The following code will load the dataset into your environment and plot
% the data.
%
'''
import matplotlib.pyplot as plt
import scipy.io as sio
import numpy as np
import scipy.optimize as op
# Load Training Data
print('Loading and Visualizing Data ...\n')
# Use a raw string so the backslashes in the Windows path are not treated as escape sequences
Data = sio.loadmat(r'D:\exercise\machine-learning-ex5\machine-learning-ex5\ex5\ex5data1.mat')
x_train = Data['X']
y_train = Data['y']
y_train = y_train.flatten()
Xval = Data['Xval']
yval = Data['yval']
yval = yval.flatten()
Xtest = Data['Xtest']
ytest = Data['ytest']
ytest = ytest.flatten()
m = x_train.shape[0]
plt.plot(x_train, y_train, color='red', linewidth=1.5, marker='x', linestyle='None')
plt.xlabel('Change in water level (x)')
plt.ylabel('Water flowing out of the dam (y)')
#plt.show()
'''
%% =========== Part 2: Regularized Linear Regression Cost =============
% You should now implement the cost function for regularized linear
% regression.
%
'''
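'''
For reference, the two functions below implement the regularized linear
regression cost and gradient as given in ex5.pdf (note that the bias term
\theta_0 is excluded from the regularization penalty):

    J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \bigl( h_\theta(x^{(i)}) - y^{(i)} \bigr)^2
              + \frac{\lambda}{2m} \sum_{j=1}^{n} \theta_j^2

    \frac{\partial J}{\partial \theta_0} = \frac{1}{m} \sum_{i=1}^{m} \bigl( h_\theta(x^{(i)}) - y^{(i)} \bigr) x_0^{(i)}

    \frac{\partial J}{\partial \theta_j} = \frac{1}{m} \sum_{i=1}^{m} \bigl( h_\theta(x^{(i)}) - y^{(i)} \bigr) x_j^{(i)}
                                         + \frac{\lambda}{m} \theta_j  \qquad (j \ge 1)
'''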
def hypothesis(theta, X):
    # Linear hypothesis h(x) = X * theta. (The original code called this
    # "sigmoid", a leftover name from the logistic regression exercise;
    # no sigmoid is applied here, since this is linear regression.)
    return (X @ theta.T).flatten()

def linearRegCostFunction(theta, X, y, lambda_L):
    m = X.shape[0]
    X = np.c_[np.ones(m), X]             # prepend the bias column
    reg_theta = theta.flatten()[1:]      # theta_0 is not regularized
    err = hypothesis(theta, X) - y
    J = (np.sum(err ** 2) + lambda_L * np.sum(reg_theta ** 2)) / (2 * m)
    return J

def gradientReg(theta, X, y, lambda_L):
    m = X.shape[0]
    X = np.c_[np.ones(m), X]             # prepend the bias column
    err = hypothesis(theta, X) - y       # shape (m,)
    grad = (X.T @ err) / m               # unregularized gradient, shape (n+1,)
    grad[1:] += lambda_L * theta.flatten()[1:] / m   # skip theta_0 in the penalty
    return grad
theta = np.array([1.0, 1.0])
J = linearRegCostFunction(theta, x_train, y_train, 1)
grad = gradientReg(theta, x_train, y_train, 1)
print('Cost at theta = [1 ; 1]: %f '
      '\n(this value should be about 303.993192)\n' % J)
print('Gradient at theta = [1 ; 1]: [%f; %f] '
      '\n(this value should be about [-15.303016; 598.250744])\n' % (grad[0], grad[1]))
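The scipy.optimize import (op) has not been used yet; the later Parts of ex5 fit theta by minimizing this cost. Below is a minimal sketch of such a training helper, assuming the two functions above: the name trainLinearReg mirrors the Matlab file trainLinearReg.m, but the use of op.minimize with method='TNC' is my own choice, not part of the original listing.

# Hypothetical helper (not in the original listing): fit theta by minimizing
# the regularized cost, supplying the analytic gradient. Note that x0 must be
# a flat 1-D array for scipy.optimize.minimize.
def trainLinearReg(X, y, lambda_L):
    initial_theta = np.zeros(X.shape[1] + 1)   # one bias weight + one weight per feature
    result = op.minimize(fun=linearRegCostFunction,
                         x0=initial_theta,
                         args=(X, y, lambda_L),
                         jac=gradientReg,
                         method='TNC')
    return result.x

# Example usage (Part 4 of ex5.m trains with lambda = 0):
# theta_fit = trainLinearReg(x_train, y_train, 0)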