IRLS的简单例子 (A simple example of IRLS — Iteratively Reweighted Least Squares)

from numpy import array, diag, dot, maximum, empty, repeat, ones, sum
from numpy.linalg import inv
from sklearn import datasets
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error

def IRLS(X, y, maxiter, w_init=1, d=0.0001, tol=0.0001):
    """Robust linear regression via Iteratively Reweighted Least Squares.

    Minimizes an (approximate) L1 residual loss by repeatedly solving a
    weighted least-squares problem with weights 1 / max(d, |residual|).

    Parameters
    ----------
    X : ndarray, shape (n_samples, n_features)
        Design matrix.
    y : ndarray, shape (n_samples,)
        Targets.
    maxiter : int
        Maximum number of reweighting iterations.
    w_init : float, default 1
        Initial (uniform) observation weight.  Previously this parameter
        was accepted but silently ignored; it is now honored.
    d : float, default 1e-4
        Floor on residual magnitudes to avoid division by zero.
    tol : float, default 1e-4
        Convergence threshold on the L1 change of the coefficients.

    Returns
    -------
    ndarray, shape (n_features,)
        Estimated regression coefficients.
    """
    nSample = X.shape[0]
    # Residual floor: keeps the weights finite when a point is fit exactly.
    delta = repeat(d, nSample)
    # Start from a uniform-weight (ordinary) least-squares solution.
    w = repeat(w_init, nSample).astype(float)
    W = diag(w)
    B = inv(X.T @ W @ X) @ X.T @ W @ y
    for _ in range(maxiter):
        B_prev = B
        residual = abs(y - X @ B)
        # L1-style reweighting: small residual -> large weight (capped by delta).
        w = 1.0 / maximum(delta, residual)
        W = diag(w)
        B = inv(X.T @ W @ X) @ X.T @ W @ y
        if sum(abs(B - B_prev)) < tol:
            break
    return B

# X, y = datasets.load_iris(return_X_y=True)
# NOTE: `load_boston` was removed in scikit-learn 1.2, so the original call
# crashes on any modern install; `load_diabetes` is a drop-in regression
# dataset with the same return_X_y interface.
X, y = datasets.load_diabetes(return_X_y=True)

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=3)

# Fit robust regression via IRLS and score it on the held-out split.
B = IRLS(X_train, y_train, maxiter=500)
y_hat = X_test @ B
MSE = mean_squared_error(y_true=y_test, y_pred=y_hat)
print("IRLS::", MSE)

# Baseline: ordinary least squares from scikit-learn.
model = LinearRegression()
model.fit(X_train, y_train)
y_hat = model.predict(X=X_test)
MSE = mean_squared_error(y_true=y_test, y_pred=y_hat)
print("LR::", MSE)




Python Implementation of Iteratively Reweighted Least Squares for Logistic Regression

https://github.com/jaeho3690/LogisiticRegression

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

DeniuHe

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值