Contents
Part 1: Boosting
The concept of boosting
Gradient boosting: GBDT
Derivation of the gradient boosting algorithm
Summary
XGBoost
Describing the decision tree
Defining the regularization term
Derivation of the XGBoost algorithm
Summary
AdaBoost
A worked example
e2 is the sum of the weights of the misclassified points
Summary
Part 2: Code example
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import xgboost as xgb
import numpy as np

# 1. Basic usage of XGBoost
# 2. Custom loss function: gradient and second derivative (Hessian)
# 3. Objectives binary:logistic / binary:logitraw
# Define f: theta * x
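# For the logistic loss l(y, y_hat) = -[y*ln(p) + (1-y)*ln(1-p)], where
# p = sigmoid(y_hat) is applied to the raw score y_hat, the derivatives are
#   dl/dy_hat   = p - y          (first order: the gradient g)
#   d2l/dy_hat2 = p * (1 - p)    (second order: the Hessian h)
# log_reg below returns exactly these two quantities for xgboost.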
def log_reg(y_hat, y):
    # Custom objective: y_hat is the raw margin, y is the DMatrix holding labels.
    p = 1.0 / (1.0 + np.exp(-y_hat))
    g = p - y.get_label()    # gradient
    h = p * (1.0 - p)        # Hessian (second derivative)
    return g, h
def error_rate(y_hat, y):
    # Custom metric (feval): returns a (name, value) pair. With a custom
    # objective, y_hat holds raw margins, so the decision threshold is 0
    # (sigmoid(0) = 0.5), not 0.5.
    return 'error', float(sum(y.get_label() != (y_hat > 0))) / len(y_hat)
if __name__ == "__main__":
    # Load the data
    data_train = xgb.DMatrix('agaricus_train.txt')
    data_test = xgb.DMatrix('agaricus_test.txt')
    print(data_train)
    print(type(data_train))

    # Set the parameters
    param = {'max_depth': 3, 'eta': 1, 'silent': 1,
             'objective': 'binary:logistic'}  # or binary:logitraw
    # param = {'max_depth': 3, 'eta': 0.3, 'silent': 1, 'objective': 'reg:logistic'}
    watchlist = [(data_test, 'eval'), (data_train, 'train')]
    n_round = 7
    # bst = xgb.train(param, data_train, num_boost_round=n_round, evals=watchlist)
    bst = xgb.train(param, data_train, num_boost_round=n_round, evals=watchlist,
                    obj=log_reg, feval=error_rate)
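    # Note (not in the original code): when obj= is passed, it overrides the
    # 'objective' entry in param, and both the values handed to feval and the
    # output of bst.predict() are raw, untransformed margin scores rather than
    # probabilities.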
    # Compute the error rate
    y_hat = bst.predict(data_test)
    y = data_test.get_label()
    print(y_hat)
    print(y)
    error = sum(y != (y_hat > 0))   # raw margins: threshold at 0, not 0.5
    rate = float(error) / len(y_hat)  # avoid shadowing the error_rate() feval above
    print('Total samples: %d' % len(y_hat))
    print('Errors: %d' % error)
    print('Error rate: %.5f%%' % (100 * rate))
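    # Optional sanity check (a sketch, not part of the original code): retrain
    # on the same data with the built-in binary:logistic objective; predict()
    # then returns probabilities, which are thresholded at 0.5 instead.
    param2 = {'max_depth': 3, 'eta': 1, 'silent': 1, 'objective': 'binary:logistic'}
    bst2 = xgb.train(param2, data_train, num_boost_round=n_round, evals=watchlist)
    p_hat = bst2.predict(data_test)   # probabilities in [0, 1]
    error2 = sum(y != (p_hat > 0.5))
    print('Built-in objective error rate: %.5f' % (float(error2) / len(p_hat)))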