# --- Imports ---
from sklearn.model_selection import train_test_split
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import GridSearchCV #网格搜索
import matplotlib.pyplot as plt#可视化
from sklearn.model_selection import KFold
from sklearn.metrics import r2_score
from sklearn.metrics import mean_squared_error
from scipy.stats import gaussian_kde
import xgboost as xgb
from sklearn.metrics import mean_absolute_error
from sklearn.preprocessing import MinMaxScaler
import pickle
import warnings
warnings.filterwarnings("ignore")
# --- Load the data ---
# Read the raw dataset from the sibling directory; the Excel export carries
# its old index in a column literally named 'Unnamed: 0', which we drop,
# then discard any rows that contain missing values.
dataset = pd.read_excel('../dataset.xlsx')
dataset = dataset.drop('Unnamed: 0',axis=1)
dataset = dataset.dropna(axis=0)
# --- 10-fold cross-validation ---
# Helper wrapping a 10-fold cross-validated evaluation of the model.
def kfold_val(dataset, n_estimators, max_depth, learning_rate, min_child_weight):
    """Evaluate an XGBoost regressor with 10-fold cross-validation.

    Features are taken from positional columns 5 onward and the target
    from positional column 4 of *dataset* (a pandas DataFrame).  Prints
    the mean R^2 and the RMSE (sqrt of the mean fold MSE) over the folds.

    Parameters
    ----------
    dataset : pandas.DataFrame
        Rows are samples; column 4 is the target, columns 5.. are features.
    n_estimators, max_depth, learning_rate, min_child_weight
        Hyper-parameters forwarded to ``xgb.XGBRegressor``.
    """
    # random_state must be an int seed; the original passed True, which
    # sklearn silently treats as the seed 1 — make that explicit.
    kf = KFold(n_splits=10, shuffle=True, random_state=1)
    # Bug fix: the accumulators were never initialised before the loop,
    # so the first `mse + ...` raised a NameError.
    mse = 0.0
    r2 = 0.0
    for train_i, test_i in kf.split(dataset):
        x_train = dataset.iloc[train_i, 5:]
        y_train = dataset.iloc[train_i, 4]
        x_test = dataset.iloc[test_i, 5:]
        y_test = dataset.iloc[test_i, 4]
        model = xgb.XGBRegressor(
            n_estimators=n_estimators,
            max_depth=max_depth,
            learning_rate=learning_rate,
            min_child_weight=min_child_weight,
        )
        model.fit(x_train, y_train)
        y_pred = model.predict(x_test)
        mse += mean_squared_error(y_test, y_pred)
        # Bug fix: sklearn metrics take (y_true, y_pred); R^2 is not
        # symmetric, so the original reversed order gave a wrong score.
        r2 += r2_score(y_test, y_pred)
    r2 = r2 / 10
    # Bug fix: average the per-fold MSEs before taking the square root;
    # the original took sqrt of the SUM of 10 MSEs, inflating the RMSE.
    rmse = np.sqrt(mse / 10)
    print('r2', r2)
    print('rmse', rmse)
# Coarse evaluation with one hand-picked hyper-parameter configuration
# (n_estimators=1600, max_depth=10, learning_rate=0.1, min_child_weight=14).
kfold_val(dataset,1600,10,0.1,14)
# Once a coarse range is established above, refine it with a grid search,
# then plot the results.
# --- Grid-search tuning ---
# Grid search over XGBoost hyper-parameters.
# Bug fixes vs the original snippet:
#   * the "[......]" placeholders were not valid Python — replaced with
#     concrete candidate grids bracketing the coarse values tried above;
#   * x_train / y_train were undefined at module level (they existed only
#     inside kfold_val) — defined here from the full dataset using the
#     same positional column convention (target = col 4, features = 5..).
x_train = dataset.iloc[:, 5:]  # feature columns
y_train = dataset.iloc[:, 4]   # target column
parameters = {
    'n_estimators': [800, 1200, 1600, 2000],
    'max_depth': [6, 8, 10, 12],
    'learning_rate': [0.05, 0.1, 0.2],
    'min_child_weight': [10, 12, 14, 16],
}
model = xgb.XGBRegressor()
# cv=2 keeps the search cheap; raise it for a more reliable estimate.
gsearch = GridSearchCV(model, param_grid=parameters, cv=2)
gsearch = gsearch.fit(x_train, y_train)
print("Best score: %0.3f" % gsearch.best_score_)
print("Best parameters set:")
best_parameters = gsearch.best_estimator_.get_params()
for param_name in sorted(parameters.keys()):
    print("\t%s: %r" % (param_name, best_parameters[param_name]))