1、XGBoost原生接口----分类
import numpy as np
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
import xgboost as xgb

# Load the iris dataset: 150 samples, 4 features, 3 classes.
data = load_iris()
x = data.data
y = data.target

# Hold out 20% of the samples for evaluation; fixed seed for reproducibility.
x_train, x_test, y_train, y_test = train_test_split(x, y, random_state=0, test_size=0.2)

params = {
    'eta': 0.1,                    # learning rate (shrinkage per boosting round)
    'max_depth': 2,
    'min_child_weight': 3,
    'gamma': 0,
    'subsample': 0.8,
    'objective': 'multi:softmax',  # multi-class; predict() returns class labels directly
    'num_class': 3                 # iris has exactly 3 classes (was 4: an unused extra softmax output)
}

# Wrap the numpy arrays in XGBoost's DMatrix; labels attached so eval metrics can be computed.
dtrain = xgb.DMatrix(x_train, y_train)
dtest = xgb.DMatrix(x_test, y_test)

# Train up to 100 rounds; early stopping monitors the LAST entry of `evals` (the test set)
# and halts when test-merror has not improved for 5 consecutive rounds.
xgbclassifer = xgb.train(params=params, dtrain=dtrain, num_boost_round=100,
                         early_stopping_rounds=5,
                         evals=[(dtrain, 'train'), (dtest, 'test')])

# Predict class labels on the held-out set and report accuracy
# (y_true first, y_pred second, per sklearn's accuracy_score convention).
y_pred = xgbclassifer.predict(xgb.DMatrix(x_test))
print(accuracy_score(y_test, y_pred))
[0] train-merror:0.033333 test-merror:0
Multiple eval metrics have been passed: 'test-merror' will be used for early stopping.
Will train until test-merror hasn't improved in 5 rounds.
[1] train-merror:0.041667 test-merror:0
[2] train-merror:0.041667 test-merror:0.033333
[3] train-merror:0.041667 test-merror:0.033333
[4] train-merror:0.041667 test-merror:0.033333
[5] train-merror:0.041667 test-merror:0.033333
Stopping. Best iteration:
[0] train-merror:0.033333 test-merror:0
0.9666666666666667
2、XGBoost的xgboost库接口----分类
import numpy as np
from xgboost import XGBClassifier
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
import xgboost as xgb
data = load_iris()
x = data.data
y = data