K-Fold Cross-Validation

1. Manual implementation: slicing

# K-fold cross-validation: manual implementation
k = 5
per_group_samples = len(X) // k   # samples per fold (integer division drops any remainder)
for i in range(k):
    # the i-th slice is the test fold
    X_test = X[i * per_group_samples: (i+1) * per_group_samples]
    y_test = y[i * per_group_samples: (i+1) * per_group_samples]
    # everything before and after the test slice is the training set
    # axis=0 stacks along rows (samples); axis=1 would stack along columns (features)
    X_train = np.concatenate((X[: i * per_group_samples], X[(i+1) * per_group_samples:]), axis=0)
    y_train = np.concatenate((y[: i * per_group_samples], y[(i+1) * per_group_samples:]), axis=0)
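One caveat with plain slicing: it only gives fair folds if the samples are already in random order, and the iris data used below is sorted by class. A minimal sketch of one way to handle this (my addition, not from the original code; the seed value is just an assumption for reproducibility) is to permute the rows once with NumPy before slicing:

# Sketch: shuffle the rows once before slicing into folds.
import numpy as np

rng = np.random.default_rng(0)    # assumed seed, only for reproducibility
perm = rng.permutation(len(X))    # random permutation of the sample indices
X, y = X[perm], y[perm]           # reorder the rows, then slice into folds as above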

2. Calling scikit-learn's KFold directly: index arrays

# K-fold cross-validation: using scikit-learn's KFold
kf = KFold(n_splits=5)
for train_index, test_index in kf.split(X):
    X_train, X_test, y_train, y_test = X[train_index], X[test_index], y[train_index], y[test_index]

# K-fold cross-validation with stochastic gradient descent (SGDRegressor)
kf = KFold(n_splits=5)
for fold, (train_index, test_index) in enumerate(kf.split(X)):
    X_train, X_test, y_train, y_test = X[train_index], X[test_index], y[train_index], y[test_index]

    clf = SGDRegressor(max_iter=1000, tol=1e-3)
    clf.fit(X_train, y_train)

    score_train = mean_squared_error(y_train, clf.predict(X_train))
    score_test = mean_squared_error(y_test, clf.predict(X_test))
    print(fold, "fold SGDRegressor train MSE:", score_train)
    print(fold, "fold SGDRegressor test MSE:", score_test)

3. Complete code

from sklearn.model_selection import KFold
from sklearn.datasets import load_iris
from sklearn.metrics import mean_squared_error
from sklearn.linear_model import SGDRegressor
import numpy as np


# Load the iris dataset
iris = load_iris()
X = iris.data
y = iris.target

# K-fold cross-validation: manual implementation
k = 5
per_group_samples = len(X) // k   # samples per fold (integer division drops any remainder)
for i in range(k):
    # the i-th slice is the test fold
    X_test = X[i * per_group_samples: (i+1) * per_group_samples]
    y_test = y[i * per_group_samples: (i+1) * per_group_samples]
    # everything before and after the test slice is the training set
    # axis=0 stacks along rows (samples); axis=1 would stack along columns (features)
    X_train = np.concatenate((X[: i * per_group_samples], X[(i+1) * per_group_samples:]), axis=0)
    y_train = np.concatenate((y[: i * per_group_samples], y[(i+1) * per_group_samples:]), axis=0)

# K-fold cross-validation: using scikit-learn's KFold
kf = KFold(n_splits=5)
for train_index, test_index in kf.split(X):
    X_train, X_test, y_train, y_test = X[train_index], X[test_index], y[train_index], y[test_index]

# K-fold cross-validation with stochastic gradient descent (SGDRegressor)
kf = KFold(n_splits=5)
for fold, (train_index, test_index) in enumerate(kf.split(X)):
    X_train, X_test, y_train, y_test = X[train_index], X[test_index], y[train_index], y[test_index]

    clf = SGDRegressor(max_iter=1000, tol=1e-3)
    clf.fit(X_train, y_train)

    score_train = mean_squared_error(y_train, clf.predict(X_train))
    score_test = mean_squared_error(y_test, clf.predict(X_test))
    print(fold, "fold SGDRegressor train MSE:", score_train)
    print(fold, "fold SGDRegressor test MSE:", score_test)