Logistic Regression (LR) implementation:
import numpy as np
from sklearn.metrics import accuracy_score
class LogisticRegression(object):
def __init__(self):
"""初始化Logistic Regression模型"""
self.coef = None
self.intercept = None
self._theta = None
def sigmoid(self, t):
    """Return the logistic function 1 / (1 + exp(-t)), numerically stably.

    Parameters
    ----------
    t : scalar or np.ndarray
        Raw linear score(s).

    Returns
    -------
    np.float64 or np.ndarray
        Value(s) in (0, 1), same shape as `t`.

    The original expression ``1. / (1. + np.exp(-t))`` raises an overflow
    RuntimeWarning for large negative ``t`` (exp(-t) overflows to inf).
    ``exp(-logaddexp(0, -t))`` computes the mathematically identical value,
    because logaddexp(0, -t) == log(1 + exp(-t)) is evaluated stably for
    any magnitude of ``t``.
    """
    return np.exp(-np.logaddexp(0, -t))
def fit(self, X_train, y_train, alpha=0.01, n_iters=1e4):
"""Train the logistic regression model with batch gradient descent.

alpha:   learning rate.
n_iters: maximum number of iterations (given as a float, 1e4).

NOTE(review): assumes X_train is (n_samples, n_features) and y_train is
a (n_samples,) vector of 0/1 labels — confirm with callers.
"""
# X_train and y_train must contain the same number of samples.
# NOTE(review): `assert` is stripped under `python -O`; raising ValueError
# would be safer for input validation.
assert X_train.shape[0] == y_train.shape[0] # check that the lengths are equal
def J(theta, X_b, y):
# Cross-entropy loss (negative log-likelihood).
# NOTE(review): not divided by len(y), unlike the gradient dJ below —
# the two differ by a constant factor; confirm this is intentional.
y_hat = self.sigmoid(X_b.dot(theta))
try:
return -np.sum(y * np.log(y_hat) + (1-y) * np.log(1 - y_hat))
except:
# NOTE(review): bare `except:` silently maps ANY failure (including
# typos/KeyboardInterrupt) to +inf; should catch specific numeric errors.
return float('inf')
def dJ(theta, X_b, y):
# Gradient of the averaged loss: X_b^T (sigmoid(X_b theta) - y) / m
# (derived formula)
return X_b.T.dot(self.sigmoid(X_b.dot(theta)) - y) / len(y)
def gradient