# 算法-线性回归, 岭回归 (Algorithms: linear regression and ridge regression)

from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LinearRegression, SGDRegressor, Ridge
from sklearn.metrics import mean_squared_error





def mylinear():
    """Fit and compare three regressors on the Boston housing dataset.

    Trains ``LinearRegression`` (normal equation), ``SGDRegressor``
    (gradient descent) and ``Ridge`` on standardized features/targets,
    then prints each model's mean squared error on the original
    (un-scaled) target units.

    Returns:
        None. All results are printed to stdout.
    """
    # NOTE(review): load_boston was deprecated in scikit-learn 1.0 and
    # removed in 1.2 (ethical concerns with the dataset). This only runs
    # on older sklearn versions; consider fetch_california_housing.
    lb = load_boston()

    # Split into training and test sets (25% held out).
    x_train, x_test, y_train, y_test = train_test_split(
        lb.data, lb.target, test_size=0.25)

    # Standardize features; fit scaler on train only, reuse on test.
    sd_x = StandardScaler()
    x_train = sd_x.fit_transform(x_train)
    x_test = sd_x.transform(x_test)

    # Standardize targets with a separate scaler. StandardScaler needs
    # 2-D input, hence reshape(-1, 1); ravel() back to 1-D because
    # SGDRegressor (and friends) expect y of shape (n_samples,).
    sd_y = StandardScaler()
    y_train = sd_y.fit_transform(y_train.reshape(-1, 1)).ravel()
    y_test = sd_y.transform(y_test.reshape(-1, 1)).ravel()

    # Un-scaled ground truth, computed once and reused for every MSE.
    y_test_orig = sd_y.inverse_transform(y_test.reshape(-1, 1))

    # --- Normal-equation linear regression ---
    lr = LinearRegression()
    lr.fit(x_train, y_train)
    print(lr.coef_)
    # predict() returns 1-D; inverse_transform requires 2-D input.
    y_lr_predict = sd_y.inverse_transform(lr.predict(x_test).reshape(-1, 1))
    print(y_lr_predict)
    print('正规方程的均方误差:', mean_squared_error(y_test_orig, y_lr_predict))

    # --- Stochastic gradient descent regression ---
    sgd = SGDRegressor()
    sgd.fit(x_train, y_train)
    y_sgd_predict = sd_y.inverse_transform(sgd.predict(x_test).reshape(-1, 1))
    print('梯度下降的均方误差:', mean_squared_error(y_test_orig, y_sgd_predict))

    # --- Ridge (L2-regularized) regression ---
    ri = Ridge()
    ri.fit(x_train, y_train)
    y_ri_predict = sd_y.inverse_transform(ri.predict(x_test).reshape(-1, 1))
    # Fixed copy-paste bug: this label previously said '梯度下降' (gradient
    # descent) but reports the Ridge model's error.
    print('岭回归的均方误差:', mean_squared_error(y_test_orig, y_ri_predict))
    return None

# Run the demo only when executed as a script, not on import.
if __name__ == "__main__":
    mylinear()

# 公式推导相关 (formula derivations): the original post embedded derivation
# images here that were lost during extraction.

# logistic回归 (logistic regression): the original post's content for this
# section was likewise an image and is missing here.

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值