模型选型
模型调参
1 模型选型
采用了 XGBoost 和 LightGBM 两种模型,以及二者的加权融合模型
2 模型调参
主要对叶子节点数(num_leaves)、学习率(learning_rate)以及估计器数量(n_estimators)进行调整
def xgb_model_fit(self,
                  X_train, X_test, y_train, y_test, alg, useTrainCV=True, cv_folds=5, early_stopping_rounds=50):
    """Fit an XGBoost regressor, report train/test MAE, and plot feature importance.

    Args:
        X_train, X_test: feature matrices for the train / test splits.
        y_train, y_test: regression targets for the train / test splits.
        alg: an xgboost.XGBRegressor-compatible estimator instance.
        useTrainCV: when True, run xgb.cv first and reset n_estimators to the
            boosting-round count chosen by early stopping.
        cv_folds: number of cross-validation folds.
        early_stopping_rounds: CV rounds without improvement before stopping.
    """
    if useTrainCV:
        # Cross-validate on the training split to pick the boosting-round count.
        xgb_param = alg.get_xgb_params()
        xgtrain = xgb.DMatrix(X_train, label=y_train)
        cvresult = xgb.cv(xgb_param, xgtrain,
                          num_boost_round=alg.get_params()['n_estimators'],
                          nfold=cv_folds, metrics='mae',
                          early_stopping_rounds=early_stopping_rounds)
        # cv stops early, so the number of result rows is the best round count.
        alg.set_params(n_estimators=cvresult.shape[0])
    # Fit the model on the full training split.
    alg.fit(X_train, y_train, eval_metric='mae')
    # Predict on both splits.
    train_data_df_predictions = alg.predict(X_train)
    test_data_df_predictions = alg.predict(X_test)
    # Regression metric (MAE) -- training split
    print("training mean_absolute_error is : ")
    print(mean_absolute_error(y_train, train_data_df_predictions))
    # Test split
    print("test mean_absolute_error is : ")
    print(mean_absolute_error(y_test, test_data_df_predictions))
    # Feature importance. plot_importance creates its own figure, so label the
    # Axes it returns; the original called plt.ylabel() BEFORE plotting, which
    # put the label on a different figure and it was silently lost.
    ax = plot_importance(alg)
    ax.set_ylabel('Feature Importance is')
    plt.show()
def bak_log(self, x_list):
    """Invert a natural-log target transform: return [e**x for x in x_list].

    The original computed pow(2.72, x), a rough hand-typed approximation of
    Euler's number; math.exp uses the exact base, so the back-transformed
    values (and the MAE reported on them) are correct.
    """
    import math  # local import: the file-level import block is not visible here
    return [math.exp(x) for x in x_list]
def light_gbm_model_fit(self, X_train, X_test, y_train, y_test):
    """Train a LightGBM regressor two ways and report MAE for each.

    First fits a fixed-parameter LGBMRegressor with early stopping on the
    test split, then runs a small grid search over learning_rate and
    n_estimators. Prints train/test MAE and feature importances for both.

    Args:
        X_train, X_test: feature matrices for the train / test splits.
        y_train, y_test: log-scaled regression targets for the two splits
            (assumed log-transformed upstream -- the first evaluation
            back-transforms them via bak_log; TODO confirm against caller).
    """
    # --- Fixed-parameter fit with early stopping on the test split ---
    gbm = lgb.LGBMRegressor(objective='regression', num_leaves=175,
                            learning_rate=0.05, n_estimators=20)  # num_leaves=31
    gbm.fit(X_train, y_train, eval_set=[(X_test, y_test)],
            eval_metric='l1', early_stopping_rounds=5)
    print('Start lightgbm predicting...')
    # Predict at the best early-stopped iteration.
    y_train_pred = gbm.predict(X_train, num_iteration=gbm.best_iteration_)
    y_test_pred = gbm.predict(X_test, num_iteration=gbm.best_iteration_)
    # Evaluate after undoing the log transform on targets and predictions.
    print('The y_train mae of test prediction is:',
          mean_absolute_error(DataSearch().bak_log(y_train), DataSearch().bak_log(y_train_pred)))
    print('The y_test mae of test prediction is:',
          mean_absolute_error(DataSearch().bak_log(y_test), DataSearch().bak_log(y_test_pred)))
    print('Feature importances:', list(gbm.feature_importances_))
    # --- Grid search over learning rate and estimator count ---
    estimator = lgb.LGBMRegressor(num_leaves=175, metrics='mae',
                                  max_depth=7, min_child_samples=1000)  # num_leaves=64
    param_grid = {
        'learning_rate': [0.01, 0.1, 1],
        'n_estimators': [20, 40],
    }
    gbm_grid = GridSearchCV(estimator, param_grid)
    gbm_grid = gbm_grid.fit(X_train, y_train)
    print("用网格搜索的方式开始进行预测")
    print('Best parameters found by grid search are:', gbm_grid.best_params_)
    # Predict with the refitted best estimator (GridSearchCV delegates predict).
    y_train_pred = gbm_grid.predict(X_train)
    y_test_pred = gbm_grid.predict(X_test)
    # Grid-search model evaluated in the (still log-scaled) target space.
    print('grid search cv The y_train mae of test prediction is:', mean_absolute_error(y_train, y_train_pred))
    print('grid search cv The y_test mae of test prediction is:', mean_absolute_error(y_test, y_test_pred))
    # GridSearchCV itself has no feature_importances_ attribute (the original
    # raised AttributeError here); read it from the refitted best estimator.
    print('Feature importances:', list(gbm_grid.best_estimator_.feature_importances_))
3 结果
The bak log test prediction is: 776.484369
待续